diff --git a/.husky/pre-commit b/.husky/pre-commit
index fad85ff..ea5a55b 100755
--- a/.husky/pre-commit
+++ b/.husky/pre-commit
@@ -1,4 +1 @@
-#!/usr/bin/env sh
-. "$(dirname -- "$0")/_/husky.sh"
-
 bunx lint-staged
diff --git a/.prettierrc.json b/.prettierrc
similarity index 100%
rename from .prettierrc.json
rename to .prettierrc
diff --git a/bun.lockb b/bun.lockb
index de053b9..92c3d38 100755
Binary files a/bun.lockb and b/bun.lockb differ
diff --git a/dist/main.cjs b/dist/main.cjs
index 9254088..24270fe 100644
--- a/dist/main.cjs
+++ b/dist/main.cjs
@@ -810,7 +810,7 @@ var require_util = __commonJS({
     var { InvalidArgumentError } = require_errors();
     var { Blob: Blob4 } = require("buffer");
     var nodeUtil = require("util");
-    var { stringify: stringify2 } = require("querystring");
+    var { stringify } = require("querystring");
     var { headerNameLowerCasedRecord } = require_constants();
     var [nodeMajor, nodeMinor] = process.versions.node.split(".").map((v) => Number(v));
     function nop() {
@@ -825,7 +825,7 @@ var require_util = __commonJS({
       if (url.includes("?") || url.includes("#")) {
         throw new Error('Query params cannot be passed when url already contains "?" or "#".');
       }
-      const stringified = stringify2(queryParams);
+      const stringified = stringify(queryParams);
       if (stringified) {
         url += "?" + stringified;
       }
@@ -888,8 +888,7 @@ var require_util = __commonJS({
         return host.substring(1, idx2);
       }
       const idx = host.indexOf(":");
-      if (idx === -1)
-        return host;
+      if (idx === -1) return host;
       return host.substring(0, idx);
     }
     function getServerName(host) {
@@ -959,8 +958,7 @@ var require_util = __commonJS({
       return headerNameLowerCasedRecord[value] || value.toLowerCase();
     }
     function parseHeaders(headers, obj = {}) {
-      if (!Array.isArray(headers))
-        return headers;
+      if (!Array.isArray(headers)) return headers;
       for (let i2 = 0; i2 < headers.length; i2 += 2) {
         const key = headers[i2].toString().toLowerCase();
         let val = obj[key];
@@ -1007,31 +1005,31 @@ var require_util = __commonJS({
     function isBuffer(buffer) {
       return buffer instanceof Uint8Array || Buffer.isBuffer(buffer);
     }
-    function validateHandler(handler2, method, upgrade) {
-      if (!handler2 || typeof handler2 !== "object") {
+    function validateHandler(handler3, method, upgrade) {
+      if (!handler3 || typeof handler3 !== "object") {
         throw new InvalidArgumentError("handler must be an object");
       }
-      if (typeof handler2.onConnect !== "function") {
+      if (typeof handler3.onConnect !== "function") {
         throw new InvalidArgumentError("invalid onConnect method");
       }
-      if (typeof handler2.onError !== "function") {
+      if (typeof handler3.onError !== "function") {
         throw new InvalidArgumentError("invalid onError method");
       }
-      if (typeof handler2.onBodySent !== "function" && handler2.onBodySent !== void 0) {
+      if (typeof handler3.onBodySent !== "function" && handler3.onBodySent !== void 0) {
         throw new InvalidArgumentError("invalid onBodySent method");
       }
       if (upgrade || method === "CONNECT") {
-        if (typeof handler2.onUpgrade !== "function") {
+        if (typeof handler3.onUpgrade !== "function") {
          throw new InvalidArgumentError("invalid onUpgrade method");
        }
      } else {
-        if (typeof handler2.onHeaders !== "function") {
+        if (typeof handler3.onHeaders !== "function") {
          throw new InvalidArgumentError("invalid onHeaders method");
        }
-        if (typeof handler2.onData !== "function") {
+        if (typeof handler3.onData !== "function") {
          throw new InvalidArgumentError("invalid onData method");
        }
-        if (typeof handler2.onComplete !== "function") {
+        if (typeof handler3.onComplete !== "function") {
throw new InvalidArgumentError("invalid onComplete method"); } } @@ -1074,14 +1072,14 @@ var require_util = __commonJS({ if (ReadableStream2.from) { return ReadableStream2.from(convertIterableToBuffer(iterable)); } - let iterator2; + let iterator3; return new ReadableStream2( { async start() { - iterator2 = iterable[Symbol.asyncIterator](); + iterator3 = iterable[Symbol.asyncIterator](); }, async pull(controller) { - const { done, value } = await iterator2.next(); + const { done, value } = await iterator3.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -1093,7 +1091,7 @@ var require_util = __commonJS({ return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator3.return(); } }, 0 @@ -1134,8 +1132,7 @@ var require_util = __commonJS({ return `${val}`; } function parseRangeHeader(range) { - if (range == null || range === "") - return { start: 0, end: null, size: null }; + if (range == null || range === "") return { start: 0, end: null, size: null }; const m2 = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null; return m2 ? { start: parseInt(m2[1]), @@ -1794,6 +1791,7 @@ var require_decodeText = __commonJS({ return decoders.utf8; case "latin1": case "ascii": + // TODO: Make these a separate, strict decoder? case "us-ascii": case "iso-8859-1": case "iso8859-1": @@ -2493,6 +2491,7 @@ var require_basename = __commonJS({ for (var i2 = path2.length - 1; i2 >= 0; --i2) { switch (path2.charCodeAt(i2)) { case 47: + // '/' case 92: path2 = path2.slice(i2 + 1); return path2 === ".." || path2 === "." ? "" : path2; @@ -3502,11 +3501,11 @@ var require_util2 = __commonJS({ var assert = require("assert"); var { isUint8Array } = require("util/types"); var supportedHashes = []; - var crypto4; + var crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); const possibleRelevantHashes = ["sha256", "sha384", "sha512"]; - supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); } catch { } function responseURL(response) { @@ -3527,11 +3526,11 @@ var require_util2 = __commonJS({ } return location; } - function requestCurrentURL(request2) { - return request2.urlList[request2.urlList.length - 1]; + function requestCurrentURL(request3) { + return request3.urlList[request3.urlList.length - 1]; } - function requestBadPort(request2) { - const url = requestCurrentURL(request2); + function requestBadPort(request3) { + const url = requestCurrentURL(request3); if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { return "blocked"; } @@ -3598,7 +3597,7 @@ var require_util2 = __commonJS({ } return true; } - function setRequestReferrerPolicyOnRedirect(request2, actualResponse) { + function setRequestReferrerPolicyOnRedirect(request3, actualResponse) { const { headersList } = actualResponse; const policyHeader = (headersList.get("referrer-policy") ?? 
"").split(","); let policy = ""; @@ -3612,7 +3611,7 @@ var require_util2 = __commonJS({ } } if (policy !== "") { - request2.referrerPolicy = policy; + request3.referrerPolicy = policy; } } function crossOriginResourcePolicyCheck() { @@ -3629,33 +3628,33 @@ var require_util2 = __commonJS({ header = httpRequest.mode; httpRequest.headersList.set("sec-fetch-mode", header); } - function appendRequestOriginHeader(request2) { - let serializedOrigin = request2.origin; - if (request2.responseTainting === "cors" || request2.mode === "websocket") { + function appendRequestOriginHeader(request3) { + let serializedOrigin = request3.origin; + if (request3.responseTainting === "cors" || request3.mode === "websocket") { if (serializedOrigin) { - request2.headersList.append("origin", serializedOrigin); + request3.headersList.append("origin", serializedOrigin); } - } else if (request2.method !== "GET" && request2.method !== "HEAD") { - switch (request2.referrerPolicy) { + } else if (request3.method !== "GET" && request3.method !== "HEAD") { + switch (request3.referrerPolicy) { case "no-referrer": serializedOrigin = null; break; case "no-referrer-when-downgrade": case "strict-origin": case "strict-origin-when-cross-origin": - if (request2.origin && urlHasHttpsScheme(request2.origin) && !urlHasHttpsScheme(requestCurrentURL(request2))) { + if (request3.origin && urlHasHttpsScheme(request3.origin) && !urlHasHttpsScheme(requestCurrentURL(request3))) { serializedOrigin = null; } break; case "same-origin": - if (!sameOrigin(request2, requestCurrentURL(request2))) { + if (!sameOrigin(request3, requestCurrentURL(request3))) { serializedOrigin = null; } break; default: } if (serializedOrigin) { - request2.headersList.append("origin", serializedOrigin); + request3.headersList.append("origin", serializedOrigin); } } } @@ -3687,26 +3686,26 @@ var require_util2 = __commonJS({ referrerPolicy: policyContainer.referrerPolicy }; } - function determineRequestsReferrer2(request2) { - const policy = request2.referrerPolicy; + function determineRequestsReferrer2(request3) { + const policy = request3.referrerPolicy; assert(policy); let referrerSource = null; - if (request2.referrer === "client") { + if (request3.referrer === "client") { const globalOrigin = getGlobalOrigin(); if (!globalOrigin || globalOrigin.origin === "null") { return "no-referrer"; } referrerSource = new URL(globalOrigin); - } else if (request2.referrer instanceof URL) { - referrerSource = request2.referrer; + } else if (request3.referrer instanceof URL) { + referrerSource = request3.referrer; } let referrerURL = stripURLForReferrer(referrerSource); const referrerOrigin = stripURLForReferrer(referrerSource, true); if (referrerURL.toString().length > 4096) { referrerURL = referrerOrigin; } - const areSameOrigin = sameOrigin(request2, referrerURL); - const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request2.url); + const areSameOrigin = sameOrigin(request3, referrerURL); + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request3.url); switch (policy) { case "origin": return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true); @@ -3717,7 +3716,7 @@ var require_util2 = __commonJS({ case "origin-when-cross-origin": return areSameOrigin ? 
referrerURL : referrerOrigin; case "strict-origin-when-cross-origin": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); if (sameOrigin(referrerURL, currentURL)) { return referrerURL; } @@ -3727,7 +3726,21 @@ var require_util2 = __commonJS({ return referrerOrigin; } case "strict-origin": + // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ case "no-referrer-when-downgrade": + // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ default: return isNonPotentiallyTrustWorthy ? "no-referrer" : referrerOrigin; } @@ -3753,14 +3766,11 @@ var require_util2 = __commonJS({ if (url.href === "about:blank" || url.href === "about:srcdoc") { return true; } - if (url.protocol === "data:") - return true; - if (url.protocol === "file:") - return true; + if (url.protocol === "data:") return true; + if (url.protocol === "file:") return true; return isOriginPotentiallyTrustworthy2(url.origin); function isOriginPotentiallyTrustworthy2(origin) { - if (origin == null || origin === "null") - return false; + if (origin == null || origin === "null") return false; const originAsURL = new URL(origin); if (originAsURL.protocol === "https:" || originAsURL.protocol === "wss:") { return true; @@ -3772,7 +3782,7 @@ var require_util2 = __commonJS({ } } function bytesMatch(bytes, metadataList) { - if (crypto4 === void 0) { + if (crypto === void 0) { return true; } const parsedMetadata = parseMetadata(metadataList); @@ -3787,7 +3797,7 @@ var require_util2 = __commonJS({ for (const item of metadata) { const algorithm = item.algo; const expectedValue = item.hash; - let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64"); + let actualValue = crypto.createHash(algorithm).update(bytes).digest("base64"); if (actualValue[actualValue.length - 1] === "=") { if (actualValue[actualValue.length - 2] === "=") { actualValue = actualValue.slice(0, -2); @@ -3866,7 +3876,7 @@ var require_util2 = __commonJS({ } return true; } - function tryUpgradeRequestToAPotentiallyTrustworthyURL(request2) { + function tryUpgradeRequestToAPotentiallyTrustworthyURL(request3) { } function sameOrigin(A2, B) { if (A2.origin === B.origin && A2.origin === "null") { @@ -3919,11 +3929,11 @@ var require_util2 = __commonJS({ return result; } var esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())); - function makeIterator(iterator2, name, kind) { + function makeIterator(iterator3, name, kind) { const object = { index: 0, kind, - target: iterator2 + target: iterator3 }; const i2 = { next() { @@ -4731,12 +4741,10 @@ var require_dataURL = __commonJS({ let lead = 0; let trail = str.length - 1; if (leading) { - for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++) - ; + for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++) ; } if (trailing) { - for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--) - ; + for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--) ; } return str.slice(lead, trail + 1); } @@ -4747,12 +4755,10 @@ var require_dataURL = __commonJS({ let lead = 0; let trail = str.length - 1; if (leading) { - for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++) - ; + for (; lead < str.length && 
isASCIIWhitespace(str[lead]); lead++) ; } if (trailing) { - for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--) - ; + for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--) ; } return str.slice(lead, trail + 1); } @@ -5246,13 +5252,13 @@ Content-Type: ${value.type || "application/octet-stream"}\r length = Buffer.byteLength(source); } if (action != null) { - let iterator2; + let iterator3; stream = new ReadableStream2({ async start() { - iterator2 = action(object)[Symbol.asyncIterator](); + iterator3 = action(object)[Symbol.asyncIterator](); }, async pull(controller) { - const { value, done } = await iterator2.next(); + const { value, done } = await iterator3.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -5265,7 +5271,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator3.return(); }, type: void 0 }); @@ -5346,8 +5352,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r const contentType = this.headers.get("Content-Type"); if (/multipart\/form-data/.test(contentType)) { const headers = {}; - for (const [key, value] of this.headers) - headers[key.toLowerCase()] = value; + for (const [key, value] of this.headers) headers[key.toLowerCase()] = value; const responseFormData = new FormData3(); let busboy; try { @@ -5388,9 +5393,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r busboy.on("finish", resolve); busboy.on("error", (err) => reject(new TypeError(err))); }); - if (this.body !== null) - for await (const chunk of consumeBody2(this[kState].body)) - busboy.write(chunk); + if (this.body !== null) for await (const chunk of consumeBody2(this[kState].body)) busboy.write(chunk); busboy.end(); await busboyResolve; return responseFormData; @@ -5531,7 +5534,7 @@ var require_request = __commonJS({ reset, throwOnError, expectContinue - }, handler2) { + }, handler3) { if (typeof path2 !== "string") { throw new InvalidArgumentError("path must be a string"); } else if (path2[0] !== "/" && !(path2.startsWith("http://") || path2.startsWith("https://")) && method !== "CONNECT") { @@ -5645,9 +5648,9 @@ var require_request = __commonJS({ this.headers += `content-type: ${body.type}\r `; } - util.validateHandler(handler2, method, upgrade); + util.validateHandler(handler3, method, upgrade); this.servername = util.getServerName(this.host); - this[kHandler] = handler2; + this[kHandler] = handler3; if (channels.create.hasSubscribers) { channels.create.publish({ request: this }); } @@ -5749,43 +5752,40 @@ var require_request = __commonJS({ processHeader(this, key, value); return this; } - static [kHTTP1BuildRequest](origin, opts, handler2) { - return new _Request(origin, opts, handler2); + static [kHTTP1BuildRequest](origin, opts, handler3) { + return new _Request(origin, opts, handler3); } - static [kHTTP2BuildRequest](origin, opts, handler2) { + static [kHTTP2BuildRequest](origin, opts, handler3) { const headers = opts.headers; opts = { ...opts, headers: null }; - const request2 = new _Request(origin, opts, handler2); - request2.headers = {}; + const request3 = new _Request(origin, opts, handler3); + request3.headers = {}; if (Array.isArray(headers)) { if (headers.length % 2 !== 0) { throw new InvalidArgumentError("headers array must be even"); } for (let i2 = 0; i2 < headers.length; i2 += 2) { - processHeader(request2, headers[i2], headers[i2 + 1], true); + processHeader(request3, headers[i2], headers[i2 + 1], true); } } else if (headers && 
typeof headers === "object") { const keys = Object.keys(headers); for (let i2 = 0; i2 < keys.length; i2++) { const key = keys[i2]; - processHeader(request2, key, headers[key], true); + processHeader(request3, key, headers[key], true); } } else if (headers != null) { throw new InvalidArgumentError("headers must be an object or an array"); } - return request2; + return request3; } static [kHTTP2CopyHeaders](raw) { const rawHeaders = raw.split("\r\n"); const headers = {}; for (const header of rawHeaders) { const [key, value] = header.split(": "); - if (value == null || value.length === 0) - continue; - if (headers[key]) - headers[key] += `,${value}`; - else - headers[key] = value; + if (value == null || value.length === 0) continue; + if (headers[key]) headers[key] += `,${value}`; + else headers[key] = value; } return headers; } @@ -5801,28 +5801,26 @@ var require_request = __commonJS({ return skipAppend ? val : `${key}: ${val}\r `; } - function processHeader(request2, key, val, skipAppend = false) { + function processHeader(request3, key, val, skipAppend = false) { if (val && (typeof val === "object" && !Array.isArray(val))) { throw new InvalidArgumentError(`invalid ${key} header`); } else if (val === void 0) { return; } - if (request2.host === null && key.length === 4 && key.toLowerCase() === "host") { + if (request3.host === null && key.length === 4 && key.toLowerCase() === "host") { if (headerCharRegex.exec(val) !== null) { throw new InvalidArgumentError(`invalid ${key} header`); } - request2.host = val; - } else if (request2.contentLength === null && key.length === 14 && key.toLowerCase() === "content-length") { - request2.contentLength = parseInt(val, 10); - if (!Number.isFinite(request2.contentLength)) { + request3.host = val; + } else if (request3.contentLength === null && key.length === 14 && key.toLowerCase() === "content-length") { + request3.contentLength = parseInt(val, 10); + if (!Number.isFinite(request3.contentLength)) { throw new InvalidArgumentError("invalid content-length header"); } - } else if (request2.contentType === null && key.length === 12 && key.toLowerCase() === "content-type") { - request2.contentType = val; - if (skipAppend) - request2.headers[key] = processHeaderValue(key, val, skipAppend); - else - request2.headers += processHeaderValue(key, val); + } else if (request3.contentType === null && key.length === 12 && key.toLowerCase() === "content-type") { + request3.contentType = val; + if (skipAppend) request3.headers[key] = processHeaderValue(key, val, skipAppend); + else request3.headers += processHeaderValue(key, val); } else if (key.length === 17 && key.toLowerCase() === "transfer-encoding") { throw new InvalidArgumentError("invalid transfer-encoding header"); } else if (key.length === 10 && key.toLowerCase() === "connection") { @@ -5830,7 +5828,7 @@ var require_request = __commonJS({ if (value !== "close" && value !== "keep-alive") { throw new InvalidArgumentError("invalid connection header"); } else if (value === "close") { - request2.reset = true; + request3.reset = true; } } else if (key.length === 10 && key.toLowerCase() === "keep-alive") { throw new InvalidArgumentError("invalid keep-alive header"); @@ -5844,19 +5842,15 @@ var require_request = __commonJS({ if (Array.isArray(val)) { for (let i2 = 0; i2 < val.length; i2++) { if (skipAppend) { - if (request2.headers[key]) - request2.headers[key] += `,${processHeaderValue(key, val[i2], skipAppend)}`; - else - request2.headers[key] = processHeaderValue(key, val[i2], skipAppend); + if (request3.headers[key]) 
request3.headers[key] += `,${processHeaderValue(key, val[i2], skipAppend)}`; + else request3.headers[key] = processHeaderValue(key, val[i2], skipAppend); } else { - request2.headers += processHeaderValue(key, val[i2]); + request3.headers += processHeaderValue(key, val[i2]); } } } else { - if (skipAppend) - request2.headers[key] = processHeaderValue(key, val, skipAppend); - else - request2.headers += processHeaderValue(key, val); + if (skipAppend) request3.headers[key] = processHeaderValue(key, val, skipAppend); + else request3.headers += processHeaderValue(key, val); } } } @@ -6007,20 +6001,20 @@ var require_dispatcher_base = __commonJS({ queueMicrotask(onDestroyed); }); } - [kInterceptedDispatch](opts, handler2) { + [kInterceptedDispatch](opts, handler3) { if (!this[kInterceptors] || this[kInterceptors].length === 0) { this[kInterceptedDispatch] = this[kDispatch]; - return this[kDispatch](opts, handler2); + return this[kDispatch](opts, handler3); } let dispatch = this[kDispatch].bind(this); for (let i2 = this[kInterceptors].length - 1; i2 >= 0; i2--) { dispatch = this[kInterceptors][i2](dispatch); } this[kInterceptedDispatch] = dispatch; - return dispatch(opts, handler2); + return dispatch(opts, handler3); } - dispatch(opts, handler2) { - if (!handler2 || typeof handler2 !== "object") { + dispatch(opts, handler3) { + if (!handler3 || typeof handler3 !== "object") { throw new InvalidArgumentError("handler must be an object"); } try { @@ -6033,12 +6027,12 @@ var require_dispatcher_base = __commonJS({ if (this[kClosed]) { throw new ClientClosedError(); } - return this[kInterceptedDispatch](opts, handler2); + return this[kInterceptedDispatch](opts, handler3); } catch (err) { - if (typeof handler2.onError !== "function") { + if (typeof handler3.onError !== "function") { throw new InvalidArgumentError("invalid onError method"); } - handler2.onError(err); + handler3.onError(err); return false; } } @@ -6567,17 +6561,17 @@ var require_RedirectHandler = __commonJS({ } }; var RedirectHandler = class { - constructor(dispatch, maxRedirections, opts, handler2) { + constructor(dispatch, maxRedirections, opts, handler3) { if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { throw new InvalidArgumentError("maxRedirections must be a positive number"); } - util.validateHandler(handler2, opts.method, opts.upgrade); + util.validateHandler(handler3, opts.method, opts.upgrade); this.dispatch = dispatch; this.location = null; this.abort = null; this.opts = { ...opts, maxRedirections: 0 }; this.maxRedirections = maxRedirections; - this.handler = handler2; + this.handler = handler3; this.history = []; if (util.isStream(this.opts.body)) { if (util.bodyLength(this.opts.body) === 0) { @@ -6701,12 +6695,12 @@ var require_redirectInterceptor = __commonJS({ var RedirectHandler = require_RedirectHandler(); function createRedirectInterceptor({ maxRedirections: defaultMaxRedirections }) { return (dispatch) => { - return function Intercept(opts, handler2) { + return function Intercept(opts, handler3) { const { maxRedirections = defaultMaxRedirections } = opts; if (!maxRedirections) { - return dispatch(opts, handler2); + return dispatch(opts, handler3); } - const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler2); + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler3); opts = { ...opts, maxRedirections: 0 }; return dispatch(opts, redirectHandler); }; @@ -7020,12 +7014,12 @@ var require_client = __commonJS({ connect(this); 
this.once("connect", cb); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { const origin = opts.origin || this[kUrl].origin; - const request2 = this[kHTTPConnVersion] === "h2" ? Request2[kHTTP2BuildRequest](origin, opts, handler2) : Request2[kHTTP1BuildRequest](origin, opts, handler2); - this[kQueue].push(request2); + const request3 = this[kHTTPConnVersion] === "h2" ? Request2[kHTTP2BuildRequest](origin, opts, handler3) : Request2[kHTTP1BuildRequest](origin, opts, handler3); + this[kQueue].push(request3); if (this[kResuming]) { - } else if (util.bodyLength(request2.body) == null && util.isIterable(request2.body)) { + } else if (util.bodyLength(request3.body) == null && util.isIterable(request3.body)) { this[kResuming] = 1; process.nextTick(resume, this); } else { @@ -7049,8 +7043,8 @@ var require_client = __commonJS({ return new Promise((resolve) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - errorRequest(this, request2, err); + const request3 = requests[i2]; + errorRequest(this, request3, err); } const callback = () => { if (this[kClosedResolve]) { @@ -7098,13 +7092,13 @@ var require_client = __commonJS({ assert(this[kPending] === 0); const requests = client[kQueue].splice(client[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - errorRequest(this, request2, err); + const request3 = requests[i2]; + errorRequest(this, request3, err); } } else if (client[kRunning] > 0) { - const request2 = client[kQueue][client[kRunningIdx]]; + const request3 = client[kQueue][client[kRunningIdx]]; client[kQueue][client[kRunningIdx]++] = null; - errorRequest(client, request2, err); + errorRequest(client, request3, err); } client[kPendingIdx] = client[kRunningIdx]; assert(client[kRunning] === 0); @@ -7312,8 +7306,8 @@ var require_client = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request3 = client[kQueue][client[kRunningIdx]]; + if (!request3) { return -1; } } @@ -7353,12 +7347,12 @@ var require_client = __commonJS({ onUpgrade(head) { const { upgrade, client, socket, headers, statusCode } = this; assert(upgrade); - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert(!socket.destroyed); assert(socket === client[kSocket]); assert(!this.paused); - assert(request2.upgrade || request2.method === "CONNECT"); + assert(request3.upgrade || request3.method === "CONNECT"); this.statusCode = null; this.statusText = ""; this.shouldKeepAlive = null; @@ -7375,7 +7369,7 @@ var require_client = __commonJS({ client[kQueue][client[kRunningIdx]++] = null; client.emit("disconnect", client[kUrl], [client], new InformationalError("upgrade")); try { - request2.onUpgrade(statusCode, headers, socket); + request3.onUpgrade(statusCode, headers, socket); } catch (err) { util.destroy(socket, err); } @@ -7386,8 +7380,8 @@ var require_client = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request3 = client[kQueue][client[kRunningIdx]]; + if (!request3) { return -1; } assert(!this.upgrade); @@ -7396,23 +7390,23 @@ var require_client = __commonJS({ util.destroy(socket, new SocketError("bad response", util.getSocketInfo(socket))); return -1; } - if (upgrade && !request2.upgrade) { + if (upgrade && 
!request3.upgrade) { util.destroy(socket, new SocketError("bad upgrade", util.getSocketInfo(socket))); return -1; } assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS); this.statusCode = statusCode; this.shouldKeepAlive = shouldKeepAlive || // Override llhttp value which does not allow keepAlive for HEAD. - request2.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; + request3.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; if (this.statusCode >= 200) { - const bodyTimeout = request2.bodyTimeout != null ? request2.bodyTimeout : client[kBodyTimeout]; + const bodyTimeout = request3.bodyTimeout != null ? request3.bodyTimeout : client[kBodyTimeout]; this.setTimeout(bodyTimeout, TIMEOUT_BODY); } else if (this.timeout) { if (this.timeout.refresh) { this.timeout.refresh(); } } - if (request2.method === "CONNECT") { + if (request3.method === "CONNECT") { assert(client[kRunning] === 1); this.upgrade = true; return 2; @@ -7443,11 +7437,11 @@ var require_client = __commonJS({ } else { socket[kReset] = true; } - const pause = request2.onHeaders(statusCode, headers, this.resume, statusText) === false; - if (request2.aborted) { + const pause = request3.onHeaders(statusCode, headers, this.resume, statusText) === false; + if (request3.aborted) { return -1; } - if (request2.method === "HEAD") { + if (request3.method === "HEAD") { return 1; } if (statusCode < 200) { @@ -7464,8 +7458,8 @@ var require_client = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert.strictEqual(this.timeoutType, TIMEOUT_BODY); if (this.timeout) { if (this.timeout.refresh) { @@ -7478,7 +7472,7 @@ var require_client = __commonJS({ return -1; } this.bytesRead += buf.length; - if (request2.onData(buf) === false) { + if (request3.onData(buf) === false) { return constants.ERROR.PAUSED; } } @@ -7490,8 +7484,8 @@ var require_client = __commonJS({ if (upgrade) { return; } - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert(statusCode >= 100); this.statusCode = null; this.statusText = ""; @@ -7505,11 +7499,11 @@ var require_client = __commonJS({ if (statusCode < 200) { return; } - if (request2.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { + if (request3.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { util.destroy(socket, new ResponseContentLengthMismatchError()); return -1; } - request2.onComplete(headers); + request3.onComplete(headers); client[kQueue][client[kRunningIdx]++] = null; if (socket[kWriting]) { assert.strictEqual(client[kRunning], 0); @@ -7528,15 +7522,15 @@ var require_client = __commonJS({ } } }; - function onParserTimeout(parser3) { - const { socket, timeoutType, client } = parser3; + function onParserTimeout(parser4) { + const { socket, timeoutType, client } = parser4; if (timeoutType === TIMEOUT_HEADERS) { if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser3.paused, "cannot be paused while waiting for headers"); + assert(!parser4.paused, "cannot be paused while waiting for headers"); util.destroy(socket, new HeadersTimeoutError()); } } else if (timeoutType === TIMEOUT_BODY) { - if (!parser3.paused) { + if (!parser4.paused) { util.destroy(socket, new BodyTimeoutError()); } } 
else if (timeoutType === TIMEOUT_IDLE) { @@ -7545,17 +7539,17 @@ var require_client = __commonJS({ } } function onSocketReadable() { - const { [kParser]: parser3 } = this; - if (parser3) { - parser3.readMore(); + const { [kParser]: parser4 } = this; + if (parser4) { + parser4.readMore(); } } function onSocketError(err) { - const { [kClient]: client, [kParser]: parser3 } = this; + const { [kClient]: client, [kParser]: parser4 } = this; assert(err.code !== "ERR_TLS_CERT_ALTNAME_INVALID"); if (client[kHTTPConnVersion] !== "h2") { - if (err.code === "ECONNRESET" && parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + if (err.code === "ECONNRESET" && parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); return; } } @@ -7567,27 +7561,27 @@ var require_client = __commonJS({ assert(client[kPendingIdx] === client[kRunningIdx]); const requests = client[kQueue].splice(client[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - errorRequest(client, request2, err); + const request3 = requests[i2]; + errorRequest(client, request3, err); } assert(client[kSize] === 0); } } function onSocketEnd() { - const { [kParser]: parser3, [kClient]: client } = this; + const { [kParser]: parser4, [kClient]: client } = this; if (client[kHTTPConnVersion] !== "h2") { - if (parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + if (parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); return; } } util.destroy(this, new SocketError("other side closed", util.getSocketInfo(this))); } function onSocketClose() { - const { [kClient]: client, [kParser]: parser3 } = this; - if (client[kHTTPConnVersion] === "h1" && parser3) { - if (!this[kError] && parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + const { [kClient]: client, [kParser]: parser4 } = this; + if (client[kHTTPConnVersion] === "h1" && parser4) { + if (!this[kError] && parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); } this[kParser].destroy(); this[kParser] = null; @@ -7598,13 +7592,13 @@ var require_client = __commonJS({ assert(client[kPending] === 0); const requests = client[kQueue].splice(client[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - errorRequest(client, request2, err); + const request3 = requests[i2]; + errorRequest(client, request3, err); } } else if (client[kRunning] > 0 && err.code !== "UND_ERR_INFO") { - const request2 = client[kQueue][client[kRunningIdx]]; + const request3 = client[kQueue][client[kRunningIdx]]; client[kQueue][client[kRunningIdx]++] = null; - errorRequest(client, request2, err); + errorRequest(client, request3, err); } client[kPendingIdx] = client[kRunningIdx]; assert(client[kRunning] === 0); @@ -7737,8 +7731,8 @@ var require_client = __commonJS({ if (err.code === "ERR_TLS_CERT_ALTNAME_INVALID") { assert(client[kRunning] === 0); while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { - const request2 = client[kQueue][client[kPendingIdx]++]; - errorRequest(client, request2, err); + const request3 = client[kQueue][client[kPendingIdx]++]; + errorRequest(client, request3, err); } } else { onError2(client, err); @@ -7792,8 +7786,8 @@ var require_client = __commonJS({ } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { - const request3 = 
client[kQueue][client[kRunningIdx]]; - const headersTimeout = request3.headersTimeout != null ? request3.headersTimeout : client[kHeadersTimeout]; + const request4 = client[kQueue][client[kRunningIdx]]; + const headersTimeout = request4.headersTimeout != null ? request4.headersTimeout : client[kHeadersTimeout]; socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS); } } @@ -7815,13 +7809,13 @@ var require_client = __commonJS({ if (client[kRunning] >= (client[kPipelining] || 1)) { return; } - const request2 = client[kQueue][client[kPendingIdx]]; - if (client[kUrl].protocol === "https:" && client[kServerName] !== request2.servername) { + const request3 = client[kQueue][client[kPendingIdx]]; + if (client[kUrl].protocol === "https:" && client[kServerName] !== request3.servername) { if (client[kRunning] > 0) { return; } - client[kServerName] = request2.servername; - if (socket && socket.servername !== request2.servername) { + client[kServerName] = request3.servername; + if (socket && socket.servername !== request3.servername) { util.destroy(socket, new InformationalError("servername changed")); return; } @@ -7836,16 +7830,16 @@ var require_client = __commonJS({ if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { return; } - if (client[kRunning] > 0 && !request2.idempotent) { + if (client[kRunning] > 0 && !request3.idempotent) { return; } - if (client[kRunning] > 0 && (request2.upgrade || request2.method === "CONNECT")) { + if (client[kRunning] > 0 && (request3.upgrade || request3.method === "CONNECT")) { return; } - if (client[kRunning] > 0 && util.bodyLength(request2.body) !== 0 && (util.isStream(request2.body) || util.isAsyncIterable(request2.body))) { + if (client[kRunning] > 0 && util.bodyLength(request3.body) !== 0 && (util.isStream(request3.body) || util.isAsyncIterable(request3.body))) { return; } - if (!request2.aborted && write(client, request2)) { + if (!request3.aborted && write(client, request3)) { client[kPendingIdx]++; } else { client[kQueue].splice(client[kPendingIdx], 1); @@ -7855,12 +7849,12 @@ var require_client = __commonJS({ function shouldSendContentLength(method) { return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } - function write(client, request2) { + function write(client, request3) { if (client[kHTTPConnVersion] === "h2") { - writeH2(client, client[kHTTP2Session], request2); + writeH2(client, client[kHTTP2Session], request3); return; } - const { body, method, path: path2, host, upgrade, headers, blocking, reset } = request2; + const { body, method, path: path2, host, upgrade, headers, blocking, reset } = request3; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (body && typeof body.read === "function") { body.read(0); @@ -7868,31 +7862,31 @@ var require_client = __commonJS({ const bodyLength = util.bodyLength(body); let contentLength = bodyLength; if (contentLength === null) { - contentLength = request2.contentLength; + contentLength = request3.contentLength; } if (contentLength === 0 && !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength !== null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request3.contentLength !== null && request3.contentLength !== contentLength) { if (client[kStrictContentLength]) { - errorRequest(client, request2, new RequestContentLengthMismatchError()); + 
errorRequest(client, request3, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new RequestContentLengthMismatchError()); } const socket = client[kSocket]; try { - request2.onConnect((err) => { - if (request2.aborted || request2.completed) { + request3.onConnect((err) => { + if (request3.aborted || request3.completed) { return; } - errorRequest(client, request2, err || new RequestAbortedError()); + errorRequest(client, request3, err || new RequestAbortedError()); util.destroy(socket, new InformationalError("aborted")); }); } catch (err) { - errorRequest(client, request2, err); + errorRequest(client, request3, err); } - if (request2.aborted) { + if (request3.aborted) { return false; } if (method === "HEAD") { @@ -7931,7 +7925,7 @@ upgrade: ${upgrade}\r header += headers; } if (channels.sendHeaders.hasSubscribers) { - channels.sendHeaders.publish({ request: request2, headers: header, socket }); + channels.sendHeaders.publish({ request: request3, headers: header, socket }); } if (!body || bodyLength === 0) { if (contentLength === 0) { @@ -7943,7 +7937,7 @@ upgrade: ${upgrade}\r socket.write(`${header}\r `, "latin1"); } - request2.onRequestSent(); + request3.onRequestSent(); } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); socket.cork(); @@ -7952,48 +7946,46 @@ upgrade: ${upgrade}\r `, "latin1"); socket.write(body); socket.uncork(); - request2.onBodySent(body); - request2.onRequestSent(); + request3.onBodySent(body); + request3.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } } else if (util.isBlobLike(body)) { if (typeof body.stream === "function") { - writeIterable({ body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload }); + writeIterable({ body: body.stream(), client, request: request3, socket, contentLength, header, expectsPayload }); } else { - writeBlob({ body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeBlob({ body, client, request: request3, socket, contentLength, header, expectsPayload }); } } else if (util.isStream(body)) { - writeStream({ body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeStream({ body, client, request: request3, socket, contentLength, header, expectsPayload }); } else if (util.isIterable(body)) { - writeIterable({ body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeIterable({ body, client, request: request3, socket, contentLength, header, expectsPayload }); } else { assert(false); } return true; } - function writeH2(client, session, request2) { - const { body, method, path: path2, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + function writeH2(client, session, request3) { + const { body, method, path: path2, host, upgrade, expectContinue, signal, headers: reqHeaders } = request3; let headers; - if (typeof reqHeaders === "string") - headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim()); - else - headers = reqHeaders; + if (typeof reqHeaders === "string") headers = Request2[kHTTP2CopyHeaders](reqHeaders.trim()); + else headers = reqHeaders; if (upgrade) { - errorRequest(client, request2, new Error("Upgrade not supported for H2")); + errorRequest(client, request3, new Error("Upgrade not supported for H2")); return false; } try { - request2.onConnect((err) => { - if (request2.aborted || request2.completed) { + request3.onConnect((err) => { + if (request3.aborted || 
request3.completed) { return; } - errorRequest(client, request2, err || new RequestAbortedError()); + errorRequest(client, request3, err || new RequestAbortedError()); }); } catch (err) { - errorRequest(client, request2, err); + errorRequest(client, request3, err); } - if (request2.aborted) { + if (request3.aborted) { return false; } let stream; @@ -8004,18 +7996,17 @@ upgrade: ${upgrade}\r session.ref(); stream = session.request(headers, { endStream: false, signal }); if (stream.id && !stream.pending) { - request2.onUpgrade(null, null, stream); + request3.onUpgrade(null, null, stream); ++h2State.openStreams; } else { stream.once("ready", () => { - request2.onUpgrade(null, null, stream); + request3.onUpgrade(null, null, stream); ++h2State.openStreams; }); } stream.once("close", () => { h2State.openStreams -= 1; - if (h2State.openStreams === 0) - session.unref(); + if (h2State.openStreams === 0) session.unref(); }); return true; } @@ -8027,14 +8018,14 @@ upgrade: ${upgrade}\r } let contentLength = util.bodyLength(body); if (contentLength == null) { - contentLength = request2.contentLength; + contentLength = request3.contentLength; } if (contentLength === 0 || !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength != null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request3.contentLength != null && request3.contentLength !== contentLength) { if (client[kStrictContentLength]) { - errorRequest(client, request2, new RequestContentLengthMismatchError()); + errorRequest(client, request3, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new RequestContentLengthMismatchError()); @@ -8059,15 +8050,15 @@ upgrade: ${upgrade}\r ++h2State.openStreams; stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; - if (request2.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), "") === false) { + if (request3.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), "") === false) { stream.pause(); } }); stream.once("end", () => { - request2.onComplete([]); + request3.onComplete([]); }); stream.on("data", (chunk) => { - if (request2.onData(chunk) === false) { + if (request3.onData(chunk) === false) { stream.pause(); } }); @@ -8085,7 +8076,7 @@ upgrade: ${upgrade}\r }); stream.once("frameError", (type, code) => { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`); - errorRequest(client, request2, err); + errorRequest(client, request3, err); if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1; util.destroy(stream, err); @@ -8094,20 +8085,20 @@ upgrade: ${upgrade}\r return true; function writeBodyH2() { if (!body) { - request2.onRequestSent(); + request3.onRequestSent(); } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); stream.cork(); stream.write(body); stream.uncork(); stream.end(); - request2.onBodySent(body); - request2.onRequestSent(); + request3.onBodySent(body); + request3.onRequestSent(); } else if (util.isBlobLike(body)) { if (typeof body.stream === "function") { writeIterable({ client, - request: request2, + request: request3, contentLength, h2stream: stream, expectsPayload, @@ -8119,7 +8110,7 @@ upgrade: ${upgrade}\r writeBlob({ body, client, - request: request2, + 
request: request3, contentLength, expectsPayload, h2stream: stream, @@ -8131,7 +8122,7 @@ upgrade: ${upgrade}\r writeStream({ body, client, - request: request2, + request: request3, contentLength, expectsPayload, socket: client[kSocket], @@ -8142,7 +8133,7 @@ upgrade: ${upgrade}\r writeIterable({ body, client, - request: request2, + request: request3, contentLength, expectsPayload, header: "", @@ -8154,11 +8145,11 @@ upgrade: ${upgrade}\r } } } - function writeStream({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + function writeStream({ h2stream, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined"); if (client[kHTTPConnVersion] === "h2") { let onPipeData = function(chunk) { - request2.onBodySent(chunk); + request3.onBodySent(chunk); }; const pipe = pipeline2( body, @@ -8168,7 +8159,7 @@ upgrade: ${upgrade}\r util.destroy(body, err); util.destroy(h2stream, err); } else { - request2.onRequestSent(); + request3.onRequestSent(); } } ); @@ -8180,7 +8171,7 @@ upgrade: ${upgrade}\r return; } let finished = false; - const writer = new AsyncWriter({ socket, request: request2, contentLength, client, expectsPayload, header }); + const writer = new AsyncWriter({ socket, request: request3, contentLength, client, expectsPayload, header }); const onData = function(chunk) { if (finished) { return; @@ -8236,7 +8227,7 @@ upgrade: ${upgrade}\r } socket.on("drain", onDrain).on("error", onFinished); } - async function writeBlob({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeBlob({ h2stream, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength === body.size, "blob body must have content length"); const isH2 = client[kHTTPConnVersion] === "h2"; try { @@ -8256,8 +8247,8 @@ upgrade: ${upgrade}\r socket.write(buffer); socket.uncork(); } - request2.onBodySent(buffer); - request2.onRequestSent(); + request3.onBodySent(buffer); + request3.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -8266,7 +8257,7 @@ upgrade: ${upgrade}\r util.destroy(isH2 ? 
h2stream : socket, err); } } - async function writeIterable({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeIterable({ h2stream, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined"); let callback = null; function onDrain() { @@ -8292,7 +8283,7 @@ upgrade: ${upgrade}\r throw socket[kError]; } const res = h2stream.write(chunk); - request2.onBodySent(chunk); + request3.onBodySent(chunk); if (!res) { await waitForDrain(); } @@ -8300,14 +8291,14 @@ upgrade: ${upgrade}\r } catch (err) { h2stream.destroy(err); } finally { - request2.onRequestSent(); + request3.onRequestSent(); h2stream.end(); h2stream.off("close", onDrain).off("drain", onDrain); } return; } socket.on("close", onDrain).on("drain", onDrain); - const writer = new AsyncWriter({ socket, request: request2, contentLength, client, expectsPayload, header }); + const writer = new AsyncWriter({ socket, request: request3, contentLength, client, expectsPayload, header }); try { for await (const chunk of body) { if (socket[kError]) { @@ -8325,9 +8316,9 @@ upgrade: ${upgrade}\r } } var AsyncWriter = class { - constructor({ socket, request: request2, contentLength, client, expectsPayload, header }) { + constructor({ socket, request: request3, contentLength, client, expectsPayload, header }) { this.socket = socket; - this.request = request2; + this.request = request3; this.contentLength = contentLength; this.client = client; this.bytesWritten = 0; @@ -8336,7 +8327,7 @@ upgrade: ${upgrade}\r socket[kWriting] = true; } write(chunk) { - const { socket, request: request2, contentLength, client, bytesWritten, expectsPayload, header } = this; + const { socket, request: request3, contentLength, client, bytesWritten, expectsPayload, header } = this; if (socket[kError]) { throw socket[kError]; } @@ -8375,7 +8366,7 @@ ${len.toString(16)}\r this.bytesWritten += len; const ret = socket.write(chunk); socket.uncork(); - request2.onBodySent(chunk); + request3.onBodySent(chunk); if (!ret) { if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { if (socket[kParser].timeout.refresh) { @@ -8386,8 +8377,8 @@ ${len.toString(16)}\r return ret; } end() { - const { socket, contentLength, client, bytesWritten, expectsPayload, header, request: request2 } = this; - request2.onRequestSent(); + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request: request3 } = this; + request3.onRequestSent(); socket[kWriting] = false; if (socket[kError]) { throw socket[kError]; @@ -8430,10 +8421,10 @@ ${len.toString(16)}\r } } }; - function errorRequest(client, request2, err) { + function errorRequest(client, request3, err) { try { - request2.onError(err); - assert(request2.aborted); + request3.onError(err); + assert(request3.aborted); } catch (err2) { client.emit("error", err2); } @@ -8642,13 +8633,13 @@ var require_pool_base = __commonJS({ } return Promise.all(this[kClients].map((c) => c.destroy(err))); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { const dispatcher = this[kGetDispatcher](); if (!dispatcher) { this[kNeedDrain] = true; - this[kQueue].push({ opts, handler: handler2 }); + this[kQueue].push({ opts, handler: handler3 }); this[kQueued]++; - } else if (!dispatcher.dispatch(opts, handler2)) { + } else if (!dispatcher.dispatch(opts, handler3)) { dispatcher[kNeedDrain] = true; this[kNeedDrain] = 
!this[kGetDispatcher](); } @@ -8797,8 +8788,7 @@ var require_balanced_pool = __commonJS({ var kMaxWeightPerServer = Symbol("kMaxWeightPerServer"); var kErrorPenalty = Symbol("kErrorPenalty"); function getGreatestCommonDivisor(a, b) { - if (b === 0) - return a; + if (b === 0) return a; return getGreatestCommonDivisor(b, a % b); } function defaultFactory(origin, opts) { @@ -9024,7 +9014,7 @@ var require_agent = __commonJS({ } return ret; } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { let key; if (opts.origin && (typeof opts.origin === "string" || opts.origin instanceof URL)) { key = String(opts.origin); @@ -9038,7 +9028,7 @@ var require_agent = __commonJS({ this[kClients].set(key, new WeakRef2(dispatcher)); this[kFinalizer].register(dispatcher, key); } - return dispatcher.dispatch(opts, handler2); + return dispatcher.dispatch(opts, handler3); } async [kClose]() { const closePromises = []; @@ -9080,7 +9070,7 @@ var require_readable = __commonJS({ var kBody = Symbol("kBody"); var kAbort = Symbol("abort"); var kContentType = Symbol("kContentType"); - var noop3 = () => { + var noop4 = () => { }; module2.exports = class BodyReadable extends Readable { constructor({ @@ -9202,7 +9192,7 @@ var require_readable = __commonJS({ return new Promise((resolve, reject) => { const signalListenerCleanup = signal ? util.addAbortListener(signal, () => { this.destroy(); - }) : noop3; + }) : noop4; this.on("close", function() { signalListenerCleanup(); if (signal && signal.aborted) { @@ -9210,7 +9200,7 @@ var require_readable = __commonJS({ } else { resolve(null); } - }).on("error", noop3).on("data", function(chunk) { + }).on("error", noop4).on("data", function(chunk) { limit -= chunk.length; if (limit <= 0) { this.destroy(); @@ -9542,10 +9532,10 @@ var require_api_request = __commonJS({ } } }; - function request2(opts, callback) { + function request3(opts, callback) { if (callback === void 0) { return new Promise((resolve, reject) => { - request2.call(this, opts, (err, data) => { + request3.call(this, opts, (err, data) => { return err ? reject(err) : resolve(data); }); }); @@ -9560,7 +9550,7 @@ var require_api_request = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = request2; + module2.exports = request3; module2.exports.RequestHandler = RequestHandler; } }); @@ -9791,11 +9781,11 @@ var require_api_pipeline = __commonJS({ } }; var PipelineHandler = class extends AsyncResource { - constructor(opts, handler2) { + constructor(opts, handler3) { if (!opts || typeof opts !== "object") { throw new InvalidArgumentError("invalid opts"); } - if (typeof handler2 !== "function") { + if (typeof handler3 !== "function") { throw new InvalidArgumentError("invalid handler"); } const { signal, method, opaque, onInfo, responseHeaders } = opts; @@ -9811,7 +9801,7 @@ var require_api_pipeline = __commonJS({ super("UNDICI_PIPELINE"); this.opaque = opaque || null; this.responseHeaders = responseHeaders || null; - this.handler = handler2; + this.handler = handler3; this.abort = null; this.context = null; this.onInfo = onInfo || null; @@ -9864,7 +9854,7 @@ var require_api_pipeline = __commonJS({ this.context = context2; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler3, context: context2 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -9877,7 +9867,7 @@ var require_api_pipeline = __commonJS({ try { this.handler = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); - body = this.runInAsyncScope(handler2, null, { + body = this.runInAsyncScope(handler3, null, { statusCode, headers, opaque, @@ -9924,9 +9914,9 @@ var require_api_pipeline = __commonJS({ util.destroy(ret, err); } }; - function pipeline2(opts, handler2) { + function pipeline2(opts, handler3) { try { - const pipelineHandler = new PipelineHandler(opts, handler2); + const pipelineHandler = new PipelineHandler(opts, handler3); this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler); return pipelineHandler.ret; } catch (err) { @@ -10274,7 +10264,7 @@ var require_mock_utils = __commonJS({ const headersMatch = matchHeaders(mockDispatch2, headers); return pathMatch && methodMatch && bodyMatch && headersMatch; } - function getResponseData2(data) { + function getResponseData3(data) { if (Buffer.isBuffer(data)) { return data; } else if (typeof data === "object") { @@ -10349,7 +10339,7 @@ var require_mock_utils = __commonJS({ } return Buffer.concat(buffers).toString("utf8"); } - function mockDispatch(opts, handler2) { + function mockDispatch(opts, handler3) { const key = buildKey(opts); const mockDispatch2 = getMockDispatch(this[kDispatches], key); mockDispatch2.timesInvoked++; @@ -10362,7 +10352,7 @@ var require_mock_utils = __commonJS({ mockDispatch2.pending = timesInvoked < times; if (error !== null) { deleteMockDispatch(this[kDispatches], key); - handler2.onError(error); + handler3.onError(error); return true; } if (typeof delay2 === "number" && delay2 > 0) { @@ -10379,13 +10369,13 @@ var require_mock_utils = __commonJS({ body.then((newData) => handleReply(mockDispatches, newData)); return; } - const responseData = getResponseData2(body); + const responseData = getResponseData3(body); const responseHeaders = generateKeyValues(headers); const responseTrailers = generateKeyValues(trailers); - handler2.abort = nop; - handler2.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)); - handler2.onData(Buffer.from(responseData)); - handler2.onComplete(responseTrailers); + handler3.abort = nop; + handler3.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)); + handler3.onData(Buffer.from(responseData)); + handler3.onComplete(responseTrailers); deleteMockDispatch(mockDispatches, key); } function resume() { @@ -10396,10 +10386,10 @@ var require_mock_utils = __commonJS({ const agent = this[kMockAgent]; const origin = this[kOrigin]; const originalDispatch = this[kOriginalDispatch]; - return function dispatch(opts, handler2) { + return function dispatch(opts, handler3) { if (agent.isMockActive) { try { - mockDispatch.call(this, opts, handler2); + mockDispatch.call(this, opts, handler3); } catch (error) { if (error instanceof MockNotMatchedError) { const netConnect = agent[kGetNetConnect](); @@ -10407,7 +10397,7 @@ var require_mock_utils = __commonJS({ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`); } if (checkNetConnect(netConnect, origin)) { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler3); } else { throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`); } @@ -10416,7 
+10406,7 @@ var require_mock_utils = __commonJS({ } } } else { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler3); } }; } @@ -10436,7 +10426,7 @@ var require_mock_utils = __commonJS({ } } module2.exports = { - getResponseData: getResponseData2, + getResponseData: getResponseData3, getMockDispatch, addMockDispatch, deleteMockDispatch, @@ -10458,7 +10448,7 @@ var require_mock_utils = __commonJS({ var require_mock_interceptor = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/mock/mock-interceptor.js"(exports2, module2) { "use strict"; - var { getResponseData: getResponseData2, buildKey, addMockDispatch } = require_mock_utils(); + var { getResponseData: getResponseData3, buildKey, addMockDispatch } = require_mock_utils(); var { kDispatches, kDispatchKey, @@ -10530,7 +10520,7 @@ var require_mock_interceptor = __commonJS({ this[kContentLength] = false; } createMockScopeDispatchData(statusCode, data, responseOptions = {}) { - const responseData = getResponseData2(data); + const responseData = getResponseData3(data); const contentLength = this[kContentLength] ? { "content-length": responseData.length } : {}; const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }; const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }; @@ -10844,9 +10834,9 @@ var require_mock_agent = __commonJS({ } return dispatcher; } - dispatch(opts, handler2) { + dispatch(opts, handler3) { this.get(opts.origin); - return this[kAgent].dispatch(opts, handler2); + return this[kAgent].dispatch(opts, handler3); } async close() { await this[kAgent].close(); @@ -10935,7 +10925,7 @@ var require_proxy_agent = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/proxy-agent.js"(exports2, module2) { "use strict"; var { kProxy, kClose, kDestroy, kInterceptors } = require_symbols(); - var { URL: URL3 } = require("url"); + var { URL: URL2 } = require("url"); var Agent = require_agent(); var Pool = require_pool(); var DispatcherBase = require_dispatcher_base(); @@ -10984,7 +10974,7 @@ var require_proxy_agent = __commonJS({ this[kRequestTls] = opts.requestTls; this[kProxyTls] = opts.proxyTls; this[kProxyHeaders] = opts.headers || {}; - const resolvedUrl = new URL3(opts.uri); + const resolvedUrl = new URL2(opts.uri); const { origin, port, host, username, password } = resolvedUrl; if (opts.auth && opts.token) { throw new InvalidArgumentError("opts.auth cannot be used in combination with opts.token"); @@ -11038,8 +11028,8 @@ var require_proxy_agent = __commonJS({ } }); } - dispatch(opts, handler2) { - const { host } = new URL3(opts.origin); + dispatch(opts, handler3) { + const { host } = new URL2(opts.origin); const headers = buildHeaders(opts.headers); throwIfProxyAuthIsSent(headers); return this[kAgent].dispatch( @@ -11050,7 +11040,7 @@ var require_proxy_agent = __commonJS({ host } }, - handler2 + handler3 ); } async [kClose]() { @@ -11173,8 +11163,7 @@ var require_RetryHandler = __commonJS({ } } onBodySent(chunk) { - if (this.handler.onBodySent) - return this.handler.onBodySent(chunk); + if (this.handler.onBodySent) return this.handler.onBodySent(chunk); } static [kRetryHandlerDefaultRetry](err, { state, opts }, cb) { const { statusCode, code, headers } = err; @@ -11387,8 +11376,8 @@ var require_DecoratorHandler = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/handler/DecoratorHandler.js"(exports2, module2) { "use strict"; module2.exports = class DecoratorHandler { - 
constructor(handler2) { - this.handler = handler2; + constructor(handler3) { + this.handler = handler3; } onConnect(...args) { return this.handler.onConnect(...args); @@ -11437,10 +11426,8 @@ var require_headers = __commonJS({ function headerValueNormalize(potentialValue) { let i2 = 0; let j = potentialValue.length; - while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) - --j; - while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i2))) - ++i2; + while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j; + while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i2))) ++i2; return i2 === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i2, j); } function fill(headers, object) { @@ -12238,7 +12225,7 @@ var require_request2 = __commonJS({ policyContainer: makePolicyContainer() } }; - let request2 = null; + let request3 = null; let fallbackMode = null; const baseUrl = this[kRealm].settingsObject.baseUrl; let signal = null; @@ -12254,17 +12241,17 @@ var require_request2 = __commonJS({ "Request cannot be constructed from a URL that includes credentials: " + input ); } - request2 = makeRequest({ urlList: [parsedURL] }); + request3 = makeRequest({ urlList: [parsedURL] }); fallbackMode = "cors"; } else { assert(input instanceof _Request); - request2 = input[kState]; + request3 = input[kState]; signal = input[kSignal]; } const origin = this[kRealm].settingsObject.origin; let window2 = "client"; - if (request2.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request2.window, origin)) { - window2 = request2.window; + if (request3.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request3.window, origin)) { + window2 = request3.window; } if (init.window != null) { throw new TypeError(`'window' option '${window2}' must be null`); @@ -12272,66 +12259,66 @@ var require_request2 = __commonJS({ if ("window" in init) { window2 = "no-window"; } - request2 = makeRequest({ + request3 = makeRequest({ // URL request’s URL. // undici implementation note: this is set as the first item in request's urlList in makeRequest // method request’s method. - method: request2.method, + method: request3.method, // header list A copy of request’s header list. // undici implementation note: headersList is cloned in makeRequest - headersList: request2.headersList, + headersList: request3.headersList, // unsafe-request flag Set. - unsafeRequest: request2.unsafeRequest, + unsafeRequest: request3.unsafeRequest, // client This’s relevant settings object. client: this[kRealm].settingsObject, // window window. window: window2, // priority request’s priority. - priority: request2.priority, + priority: request3.priority, // origin request’s origin. The propagation of the origin is only significant for navigation requests // being handled by a service worker. In this scenario a request can have an origin that is different // from the current client. - origin: request2.origin, + origin: request3.origin, // referrer request’s referrer. - referrer: request2.referrer, + referrer: request3.referrer, // referrer policy request’s referrer policy. - referrerPolicy: request2.referrerPolicy, + referrerPolicy: request3.referrerPolicy, // mode request’s mode. - mode: request2.mode, + mode: request3.mode, // credentials mode request’s credentials mode. - credentials: request2.credentials, + credentials: request3.credentials, // cache mode request’s cache mode. 
- cache: request2.cache, + cache: request3.cache, // redirect mode request’s redirect mode. - redirect: request2.redirect, + redirect: request3.redirect, // integrity metadata request’s integrity metadata. - integrity: request2.integrity, + integrity: request3.integrity, // keepalive request’s keepalive. - keepalive: request2.keepalive, + keepalive: request3.keepalive, // reload-navigation flag request’s reload-navigation flag. - reloadNavigation: request2.reloadNavigation, + reloadNavigation: request3.reloadNavigation, // history-navigation flag request’s history-navigation flag. - historyNavigation: request2.historyNavigation, + historyNavigation: request3.historyNavigation, // URL list A clone of request’s URL list. - urlList: [...request2.urlList] + urlList: [...request3.urlList] }); const initHasKey = Object.keys(init).length !== 0; if (initHasKey) { - if (request2.mode === "navigate") { - request2.mode = "same-origin"; + if (request3.mode === "navigate") { + request3.mode = "same-origin"; } - request2.reloadNavigation = false; - request2.historyNavigation = false; - request2.origin = "client"; - request2.referrer = "client"; - request2.referrerPolicy = ""; - request2.url = request2.urlList[request2.urlList.length - 1]; - request2.urlList = [request2.url]; + request3.reloadNavigation = false; + request3.historyNavigation = false; + request3.origin = "client"; + request3.referrer = "client"; + request3.referrerPolicy = ""; + request3.url = request3.urlList[request3.urlList.length - 1]; + request3.urlList = [request3.url]; } if (init.referrer !== void 0) { const referrer = init.referrer; if (referrer === "") { - request2.referrer = "no-referrer"; + request3.referrer = "no-referrer"; } else { let parsedReferrer; try { @@ -12340,14 +12327,14 @@ var require_request2 = __commonJS({ throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }); } if (parsedReferrer.protocol === "about:" && parsedReferrer.hostname === "client" || origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) { - request2.referrer = "client"; + request3.referrer = "client"; } else { - request2.referrer = parsedReferrer; + request3.referrer = parsedReferrer; } } } if (init.referrerPolicy !== void 0) { - request2.referrerPolicy = init.referrerPolicy; + request3.referrerPolicy = init.referrerPolicy; } let mode; if (init.mode !== void 0) { @@ -12362,27 +12349,27 @@ var require_request2 = __commonJS({ }); } if (mode != null) { - request2.mode = mode; + request3.mode = mode; } if (init.credentials !== void 0) { - request2.credentials = init.credentials; + request3.credentials = init.credentials; } if (init.cache !== void 0) { - request2.cache = init.cache; + request3.cache = init.cache; } - if (request2.cache === "only-if-cached" && request2.mode !== "same-origin") { + if (request3.cache === "only-if-cached" && request3.mode !== "same-origin") { throw new TypeError( "'only-if-cached' can be set only with 'same-origin' mode" ); } if (init.redirect !== void 0) { - request2.redirect = init.redirect; + request3.redirect = init.redirect; } if (init.integrity != null) { - request2.integrity = String(init.integrity); + request3.integrity = String(init.integrity); } if (init.keepalive !== void 0) { - request2.keepalive = Boolean(init.keepalive); + request3.keepalive = Boolean(init.keepalive); } if (init.method !== void 0) { let method = init.method; @@ -12393,12 +12380,12 @@ var require_request2 = __commonJS({ throw new TypeError(`'${method}' HTTP method is unsupported.`); } method = 
normalizeMethodRecord[method] ?? normalizeMethod(method); - request2.method = method; + request3.method = method; } if (init.signal !== void 0) { signal = init.signal; } - this[kState] = request2; + this[kState] = request3; const ac = new AbortController(); this[kSignal] = ac.signal; this[kSignal][kRealm] = this[kRealm]; @@ -12432,13 +12419,13 @@ var require_request2 = __commonJS({ } } this[kHeaders] = new Headers2(kConstruct); - this[kHeaders][kHeadersList] = request2.headersList; + this[kHeaders][kHeadersList] = request3.headersList; this[kHeaders][kGuard] = "request"; this[kHeaders][kRealm] = this[kRealm]; if (mode === "no-cors") { - if (!corsSafeListedMethodsSet.has(request2.method)) { + if (!corsSafeListedMethodsSet.has(request3.method)) { throw new TypeError( - `'${request2.method} is unsupported in no-cors mode.` + `'${request3.method} is unsupported in no-cors mode.` ); } this[kHeaders][kGuard] = "request-no-cors"; @@ -12457,14 +12444,14 @@ var require_request2 = __commonJS({ } } const inputBody = input instanceof _Request ? input[kState].body : null; - if ((init.body != null || inputBody != null) && (request2.method === "GET" || request2.method === "HEAD")) { + if ((init.body != null || inputBody != null) && (request3.method === "GET" || request3.method === "HEAD")) { throw new TypeError("Request with GET/HEAD method cannot have body."); } let initBody = null; if (init.body != null) { const [extractedBody, contentType] = extractBody( init.body, - request2.keepalive + request3.keepalive ); initBody = extractedBody; if (contentType && !this[kHeaders][kHeadersList].contains("content-type")) { @@ -12476,12 +12463,12 @@ var require_request2 = __commonJS({ if (initBody != null && init.duplex == null) { throw new TypeError("RequestInit: duplex option is required when sending a body."); } - if (request2.mode !== "same-origin" && request2.mode !== "cors") { + if (request3.mode !== "same-origin" && request3.mode !== "cors") { throw new TypeError( 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' ); } - request2.useCORSPreflightFlag = true; + request3.useCORSPreflightFlag = true; } let finalBody = inputOrInitBody; if (initBody == null && inputBody != null) { @@ -12651,7 +12638,7 @@ var require_request2 = __commonJS({ }; mixinBody(Request2); function makeRequest(init) { - const request2 = { + const request3 = { method: "GET", localURLsOnly: false, unsafeRequest: false, @@ -12690,13 +12677,13 @@ var require_request2 = __commonJS({ ...init, headersList: init.headersList ? 
new HeadersList(init.headersList) : new HeadersList() }; - request2.url = request2.urlList[0]; - return request2; + request3.url = request3.urlList[0]; + return request3; } - function cloneRequest(request2) { - const newRequest = makeRequest({ ...request2, body: null }); - if (request2.body != null) { - newRequest.body = cloneBody(request2.body); + function cloneRequest(request3) { + const newRequest = makeRequest({ ...request3, body: null }); + if (request3.body != null) { + newRequest.body = cloneBody(request3.body); } return newRequest; } @@ -12929,14 +12916,14 @@ var require_fetch = __commonJS({ p.reject(e2); return p.promise; } - const request2 = requestObject[kState]; + const request3 = requestObject[kState]; if (requestObject.signal.aborted) { - abortFetch(p, request2, null, requestObject.signal.reason); + abortFetch(p, request3, null, requestObject.signal.reason); return p.promise; } - const globalObject = request2.client.globalObject; + const globalObject = request3.client.globalObject; if (globalObject?.constructor?.name === "ServiceWorkerGlobalScope") { - request2.serviceWorkers = "none"; + request3.serviceWorkers = "none"; } let responseObject = null; const relevantRealm = null; @@ -12948,7 +12935,7 @@ var require_fetch = __commonJS({ locallyAborted = true; assert(controller != null); controller.abort(requestObject.signal.reason); - abortFetch(p, request2, responseObject, requestObject.signal.reason); + abortFetch(p, request3, responseObject, requestObject.signal.reason); } ); const handleFetchDone = (response) => finalizeAndReportTiming(response, "fetch"); @@ -12957,7 +12944,7 @@ var require_fetch = __commonJS({ return Promise.resolve(); } if (response.aborted) { - abortFetch(p, request2, responseObject, controller.serializedAbortReason); + abortFetch(p, request3, responseObject, controller.serializedAbortReason); return Promise.resolve(); } if (response.type === "error") { @@ -12975,7 +12962,7 @@ var require_fetch = __commonJS({ p.resolve(responseObject); }; controller = fetching({ - request: request2, + request: request3, processResponseEndOfBody: handleFetchDone, processResponse, dispatcher: init.dispatcher ?? 
getGlobalDispatcher() @@ -13020,13 +13007,13 @@ var require_fetch = __commonJS({ performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis2, cacheState); } } - function abortFetch(p, request2, responseObject, error) { + function abortFetch(p, request3, responseObject, error) { if (!error) { error = new DOMException3("The operation was aborted.", "AbortError"); } p.reject(error); - if (request2.body != null && isReadable(request2.body?.stream)) { - request2.body.stream.cancel(error).catch((err) => { + if (request3.body != null && isReadable(request3.body?.stream)) { + request3.body.stream.cancel(error).catch((err) => { if (err.code === "ERR_INVALID_STATE") { return; } @@ -13047,7 +13034,7 @@ var require_fetch = __commonJS({ } } function fetching({ - request: request2, + request: request3, processRequestBodyChunkLength, processRequestEndOfBody, processResponse, @@ -13059,9 +13046,9 @@ var require_fetch = __commonJS({ }) { let taskDestination = null; let crossOriginIsolatedCapability = false; - if (request2.client != null) { - taskDestination = request2.client.globalObject; - crossOriginIsolatedCapability = request2.client.crossOriginIsolatedCapability; + if (request3.client != null) { + taskDestination = request3.client.globalObject; + crossOriginIsolatedCapability = request3.client.crossOriginIsolatedCapability; } const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability); const timingInfo = createOpaqueTimingInfo({ @@ -13069,7 +13056,7 @@ var require_fetch = __commonJS({ }); const fetchParams = { controller: new Fetch(dispatcher), - request: request2, + request: request3, timingInfo, processRequestBodyChunkLength, processRequestEndOfBody, @@ -13079,32 +13066,32 @@ var require_fetch = __commonJS({ taskDestination, crossOriginIsolatedCapability }; - assert(!request2.body || request2.body.stream); - if (request2.window === "client") { - request2.window = request2.client?.globalObject?.constructor?.name === "Window" ? request2.client : "no-window"; + assert(!request3.body || request3.body.stream); + if (request3.window === "client") { + request3.window = request3.client?.globalObject?.constructor?.name === "Window" ? 
request3.client : "no-window"; } - if (request2.origin === "client") { - request2.origin = request2.client?.origin; + if (request3.origin === "client") { + request3.origin = request3.client?.origin; } - if (request2.policyContainer === "client") { - if (request2.client != null) { - request2.policyContainer = clonePolicyContainer( - request2.client.policyContainer + if (request3.policyContainer === "client") { + if (request3.client != null) { + request3.policyContainer = clonePolicyContainer( + request3.client.policyContainer ); } else { - request2.policyContainer = makePolicyContainer(); + request3.policyContainer = makePolicyContainer(); } } - if (!request2.headersList.contains("accept")) { + if (!request3.headersList.contains("accept")) { const value = "*/*"; - request2.headersList.append("accept", value); + request3.headersList.append("accept", value); } - if (!request2.headersList.contains("accept-language")) { - request2.headersList.append("accept-language", "*"); + if (!request3.headersList.contains("accept-language")) { + request3.headersList.append("accept-language", "*"); } - if (request2.priority === null) { + if (request3.priority === null) { } - if (subresourceSet.has(request2.destination)) { + if (subresourceSet.has(request3.destination)) { } mainFetch(fetchParams).catch((err) => { fetchParams.controller.terminate(err); @@ -13112,50 +13099,50 @@ var require_fetch = __commonJS({ return fetchParams.controller; } async function mainFetch(fetchParams, recursive = false) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; - if (request2.localURLsOnly && !urlIsLocal(requestCurrentURL(request2))) { + if (request3.localURLsOnly && !urlIsLocal(requestCurrentURL(request3))) { response = makeNetworkError("local URLs only"); } - tryUpgradeRequestToAPotentiallyTrustworthyURL(request2); - if (requestBadPort(request2) === "blocked") { + tryUpgradeRequestToAPotentiallyTrustworthyURL(request3); + if (requestBadPort(request3) === "blocked") { response = makeNetworkError("bad port"); } - if (request2.referrerPolicy === "") { - request2.referrerPolicy = request2.policyContainer.referrerPolicy; + if (request3.referrerPolicy === "") { + request3.referrerPolicy = request3.policyContainer.referrerPolicy; } - if (request2.referrer !== "no-referrer") { - request2.referrer = determineRequestsReferrer2(request2); + if (request3.referrer !== "no-referrer") { + request3.referrer = determineRequestsReferrer2(request3); } if (response === null) { response = await (async () => { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); if ( // - request’s current URL’s origin is same origin with request’s origin, // and request’s response tainting is "basic" - sameOrigin(currentURL, request2.url) && request2.responseTainting === "basic" || // request’s current URL’s scheme is "data" + sameOrigin(currentURL, request3.url) && request3.responseTainting === "basic" || // request’s current URL’s scheme is "data" currentURL.protocol === "data:" || // - request’s mode is "navigate" or "websocket" - (request2.mode === "navigate" || request2.mode === "websocket") + (request3.mode === "navigate" || request3.mode === "websocket") ) { - request2.responseTainting = "basic"; + request3.responseTainting = "basic"; return await schemeFetch(fetchParams); } - if (request2.mode === "same-origin") { + if (request3.mode === "same-origin") { return makeNetworkError('request mode cannot be "same-origin"'); } - if (request2.mode === 
"no-cors") { - if (request2.redirect !== "follow") { + if (request3.mode === "no-cors") { + if (request3.redirect !== "follow") { return makeNetworkError( 'redirect mode cannot be "follow" for "no-cors" request' ); } - request2.responseTainting = "opaque"; + request3.responseTainting = "opaque"; return await schemeFetch(fetchParams); } - if (!urlIsHttpHttpsScheme(requestCurrentURL(request2))) { + if (!urlIsHttpHttpsScheme(requestCurrentURL(request3))) { return makeNetworkError("URL scheme must be a HTTP(S) scheme"); } - request2.responseTainting = "cors"; + request3.responseTainting = "cors"; return await httpFetch(fetchParams); })(); } @@ -13163,13 +13150,13 @@ var require_fetch = __commonJS({ return response; } if (response.status !== 0 && !response.internalResponse) { - if (request2.responseTainting === "cors") { + if (request3.responseTainting === "cors") { } - if (request2.responseTainting === "basic") { + if (request3.responseTainting === "basic") { response = filterResponse(response, "basic"); - } else if (request2.responseTainting === "cors") { + } else if (request3.responseTainting === "cors") { response = filterResponse(response, "cors"); - } else if (request2.responseTainting === "opaque") { + } else if (request3.responseTainting === "opaque") { response = filterResponse(response, "opaque"); } else { assert(false); @@ -13177,26 +13164,26 @@ var require_fetch = __commonJS({ } let internalResponse = response.status === 0 ? response : response.internalResponse; if (internalResponse.urlList.length === 0) { - internalResponse.urlList.push(...request2.urlList); + internalResponse.urlList.push(...request3.urlList); } - if (!request2.timingAllowFailed) { + if (!request3.timingAllowFailed) { response.timingAllowPassed = true; } - if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request2.headers.contains("range")) { + if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request3.headers.contains("range")) { response = internalResponse = makeNetworkError(); } - if (response.status !== 0 && (request2.method === "HEAD" || request2.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { + if (response.status !== 0 && (request3.method === "HEAD" || request3.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { internalResponse.body = null; fetchParams.controller.dump = true; } - if (request2.integrity) { + if (request3.integrity) { const processBodyError = (reason) => fetchFinale(fetchParams, makeNetworkError(reason)); - if (request2.responseTainting === "opaque" || response.body == null) { + if (request3.responseTainting === "opaque" || response.body == null) { processBodyError(response.error); return; } const processBody = (bytes) => { - if (!bytesMatch(bytes, request2.integrity)) { + if (!bytesMatch(bytes, request3.integrity)) { processBodyError("integrity mismatch"); return; } @@ -13212,8 +13199,8 @@ var require_fetch = __commonJS({ if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { return Promise.resolve(makeAppropriateNetworkError(fetchParams)); } - const { request: request2 } = fetchParams; - const { protocol: scheme } = requestCurrentURL(request2); + const { request: request3 } = fetchParams; + const { protocol: scheme } = requestCurrentURL(request3); switch (scheme) { case "about:": { return Promise.resolve(makeNetworkError("about scheme is not supported")); @@ -13222,12 +13209,12 @@ var require_fetch = 
__commonJS({ if (!resolveObjectURL) { resolveObjectURL = require("buffer").resolveObjectURL; } - const blobURLEntry = requestCurrentURL(request2); + const blobURLEntry = requestCurrentURL(request3); if (blobURLEntry.search.length !== 0) { return Promise.resolve(makeNetworkError("NetworkError when attempting to fetch resource.")); } const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()); - if (request2.method !== "GET" || !isBlobLike(blobURLEntryObject)) { + if (request3.method !== "GET" || !isBlobLike(blobURLEntryObject)) { return Promise.resolve(makeNetworkError("invalid method")); } const bodyWithType = safelyExtractBody(blobURLEntryObject); @@ -13245,7 +13232,7 @@ var require_fetch = __commonJS({ return Promise.resolve(response); } case "data:": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); const dataURLStruct = dataURLProcessor(currentURL); if (dataURLStruct === "failure") { return Promise.resolve(makeNetworkError("failed to fetch the data URL")); @@ -13327,41 +13314,41 @@ var require_fetch = __commonJS({ } } async function httpFetch(fetchParams) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; let actualResponse = null; const timingInfo = fetchParams.timingInfo; - if (request2.serviceWorkers === "all") { + if (request3.serviceWorkers === "all") { } if (response === null) { - if (request2.redirect === "follow") { - request2.serviceWorkers = "none"; + if (request3.redirect === "follow") { + request3.serviceWorkers = "none"; } actualResponse = response = await httpNetworkOrCacheFetch(fetchParams); - if (request2.responseTainting === "cors" && corsCheck(request2, response) === "failure") { + if (request3.responseTainting === "cors" && corsCheck(request3, response) === "failure") { return makeNetworkError("cors failure"); } - if (TAOCheck(request2, response) === "failure") { - request2.timingAllowFailed = true; + if (TAOCheck(request3, response) === "failure") { + request3.timingAllowFailed = true; } } - if ((request2.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( - request2.origin, - request2.client, - request2.destination, + if ((request3.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( + request3.origin, + request3.client, + request3.destination, actualResponse ) === "blocked") { return makeNetworkError("blocked"); } if (redirectStatusSet.has(actualResponse.status)) { - if (request2.redirect !== "manual") { + if (request3.redirect !== "manual") { fetchParams.controller.connection.destroy(); } - if (request2.redirect === "error") { + if (request3.redirect === "error") { response = makeNetworkError("unexpected redirect"); - } else if (request2.redirect === "manual") { + } else if (request3.redirect === "manual") { response = actualResponse; - } else if (request2.redirect === "follow") { + } else if (request3.redirect === "follow") { response = await httpRedirectFetch(fetchParams, response); } else { assert(false); @@ -13371,13 +13358,13 @@ var require_fetch = __commonJS({ return response; } function httpRedirectFetch(fetchParams, response) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; const actualResponse = response.internalResponse ? 
response.internalResponse : response; let locationURL; try { locationURL = responseLocationURL( actualResponse, - requestCurrentURL(request2).hash + requestCurrentURL(request3).hash ); if (locationURL == null) { return response; @@ -13388,63 +13375,63 @@ var require_fetch = __commonJS({ if (!urlIsHttpHttpsScheme(locationURL)) { return Promise.resolve(makeNetworkError("URL scheme must be a HTTP(S) scheme")); } - if (request2.redirectCount === 20) { + if (request3.redirectCount === 20) { return Promise.resolve(makeNetworkError("redirect count exceeded")); } - request2.redirectCount += 1; - if (request2.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request2, locationURL)) { + request3.redirectCount += 1; + if (request3.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request3, locationURL)) { return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')); } - if (request2.responseTainting === "cors" && (locationURL.username || locationURL.password)) { + if (request3.responseTainting === "cors" && (locationURL.username || locationURL.password)) { return Promise.resolve(makeNetworkError( 'URL cannot contain credentials for request mode "cors"' )); } - if (actualResponse.status !== 303 && request2.body != null && request2.body.source == null) { + if (actualResponse.status !== 303 && request3.body != null && request3.body.source == null) { return Promise.resolve(makeNetworkError()); } - if ([301, 302].includes(actualResponse.status) && request2.method === "POST" || actualResponse.status === 303 && !GET_OR_HEAD.includes(request2.method)) { - request2.method = "GET"; - request2.body = null; + if ([301, 302].includes(actualResponse.status) && request3.method === "POST" || actualResponse.status === 303 && !GET_OR_HEAD.includes(request3.method)) { + request3.method = "GET"; + request3.body = null; for (const headerName of requestBodyHeader) { - request2.headersList.delete(headerName); + request3.headersList.delete(headerName); } } - if (!sameOrigin(requestCurrentURL(request2), locationURL)) { - request2.headersList.delete("authorization"); - request2.headersList.delete("proxy-authorization", true); - request2.headersList.delete("cookie"); - request2.headersList.delete("host"); + if (!sameOrigin(requestCurrentURL(request3), locationURL)) { + request3.headersList.delete("authorization"); + request3.headersList.delete("proxy-authorization", true); + request3.headersList.delete("cookie"); + request3.headersList.delete("host"); } - if (request2.body != null) { - assert(request2.body.source != null); - request2.body = safelyExtractBody(request2.body.source)[0]; + if (request3.body != null) { + assert(request3.body.source != null); + request3.body = safelyExtractBody(request3.body.source)[0]; } const timingInfo = fetchParams.timingInfo; timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability); if (timingInfo.redirectStartTime === 0) { timingInfo.redirectStartTime = timingInfo.startTime; } - request2.urlList.push(locationURL); - setRequestReferrerPolicyOnRedirect(request2, actualResponse); + request3.urlList.push(locationURL); + setRequestReferrerPolicyOnRedirect(request3, actualResponse); return mainFetch(fetchParams, true); } async function httpNetworkOrCacheFetch(fetchParams, isAuthenticationFetch = false, isNewConnectionFetch = false) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let httpFetchParams 
= null; let httpRequest = null; let response = null; const httpCache = null; const revalidatingFlag = false; - if (request2.window === "no-window" && request2.redirect === "error") { + if (request3.window === "no-window" && request3.redirect === "error") { httpFetchParams = fetchParams; - httpRequest = request2; + httpRequest = request3; } else { - httpRequest = makeRequest(request2); + httpRequest = makeRequest(request3); httpFetchParams = { ...fetchParams }; httpFetchParams.request = httpRequest; } - const includeCredentials = request2.credentials === "include" || request2.credentials === "same-origin" && request2.responseTainting === "basic"; + const includeCredentials = request3.credentials === "include" || request3.credentials === "same-origin" && request3.responseTainting === "basic"; const contentLength = httpRequest.body ? httpRequest.body.length : null; let contentLengthHeaderValue = null; if (httpRequest.body == null && ["POST", "PUT"].includes(httpRequest.method)) { @@ -13521,7 +13508,7 @@ var require_fetch = __commonJS({ } response.requestIncludesCredentials = includeCredentials; if (response.status === 407) { - if (request2.window === "no-window") { + if (request3.window === "no-window") { return makeNetworkError(); } if (isCancelled(fetchParams)) { @@ -13533,7 +13520,7 @@ var require_fetch = __commonJS({ // response’s status is 421 response.status === 421 && // isNewConnectionFetch is false !isNewConnectionFetch && // request’s body is null, or request’s body is non-null and request’s body’s source is non-null - (request2.body == null || request2.body.source != null) + (request3.body == null || request3.body.source != null) ) { if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams); @@ -13561,21 +13548,21 @@ var require_fetch = __commonJS({ } } }; - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; const timingInfo = fetchParams.timingInfo; const httpCache = null; if (httpCache == null) { - request2.cache = "no-store"; + request3.cache = "no-store"; } const newConnection = forceNewConnection ? 
"yes" : "no"; - if (request2.mode === "websocket") { + if (request3.mode === "websocket") { } else { } let requestBody = null; - if (request2.body == null && fetchParams.processRequestEndOfBody) { + if (request3.body == null && fetchParams.processRequestEndOfBody) { queueMicrotask(() => fetchParams.processRequestEndOfBody()); - } else if (request2.body != null) { + } else if (request3.body != null) { const processBodyChunk = async function* (bytes) { if (isCancelled(fetchParams)) { return; @@ -13603,7 +13590,7 @@ var require_fetch = __commonJS({ }; requestBody = async function* () { try { - for await (const bytes of request2.body.stream) { + for await (const bytes of request3.body.stream) { yield* processBodyChunk(bytes); } processEndOfBody(); @@ -13617,8 +13604,8 @@ var require_fetch = __commonJS({ if (socket) { response = makeResponse({ status, statusText, headersList, socket }); } else { - const iterator2 = body[Symbol.asyncIterator](); - fetchParams.controller.next = () => iterator2.next(); + const iterator3 = body[Symbol.asyncIterator](); + fetchParams.controller.next = () => iterator3.next(); response = makeResponse({ status, statusText, headersList }); } } catch (err) { @@ -13715,17 +13702,17 @@ var require_fetch = __commonJS({ } return response; async function dispatch({ body }) { - const url = requestCurrentURL(request2); + const url = requestCurrentURL(request3); const agent = fetchParams.controller.dispatcher; return new Promise((resolve, reject) => agent.dispatch( { path: url.pathname + url.search, origin: url.origin, - method: request2.method, - body: fetchParams.controller.dispatcher.isMockActive ? request2.body && (request2.body.source || request2.body.stream) : body, - headers: request2.headersList.entries, + method: request3.method, + body: fetchParams.controller.dispatcher.isMockActive ? request3.body && (request3.body.source || request3.body.stream) : body, + headers: request3.headersList.entries, maxRedirections: 0, - upgrade: request2.mode === "websocket" ? "websocket" : void 0 + upgrade: request3.mode === "websocket" ? 
"websocket" : void 0 }, { body: null, @@ -13771,8 +13758,8 @@ var require_fetch = __commonJS({ } this.body = new Readable({ read: resume }); const decoders = []; - const willFollow = request2.redirect === "follow" && location && redirectStatusSet.has(status); - if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { + const willFollow = request3.redirect === "follow" && location && redirectStatusSet.has(status); + if (request3.method !== "HEAD" && request3.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { for (const coding of codings) { if (coding === "x-gzip" || coding === "gzip") { decoders.push(zlib2.createGunzip({ @@ -14742,35 +14729,34 @@ var require_cache = __commonJS({ } this.#relevantRequestResponseList = arguments[1]; } - async match(request2, options = {}) { + async match(request3, options = {}) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.match" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); - const p = await this.matchAll(request2, options); + const p = await this.matchAll(request3, options); if (p.length === 0) { return; } return p[0]; } - async matchAll(request2 = void 0, options = {}) { + async matchAll(request3 = void 0, options = {}) { webidl.brandCheck(this, _Cache); - if (request2 !== void 0) - request2 = webidl.converters.RequestInfo(request2); + if (request3 !== void 0) request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); let r2 = null; - if (request2 !== void 0) { - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 !== void 0) { + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r2 = new Request2(request2)[kState]; + } else if (typeof request3 === "string") { + r2 = new Request2(request3)[kState]; } } const responses = []; - if (request2 === void 0) { + if (request3 === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { responses.push(requestResponse[1]); } @@ -14792,11 +14778,11 @@ var require_cache = __commonJS({ } return Object.freeze(responseList); } - async add(request2) { + async add(request3) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.add" }); - request2 = webidl.converters.RequestInfo(request2); - const requests = [request2]; + request3 = webidl.converters.RequestInfo(request3); + const requests = [request3]; const responseArrayPromise = this.addAll(requests); return await responseArrayPromise; } @@ -14806,11 +14792,11 @@ var require_cache = __commonJS({ requests = webidl.converters["sequence"](requests); const responsePromises = []; const requestList = []; - for (const request2 of requests) { - if (typeof request2 === "string") { + for (const request3 of requests) { + if (typeof request3 === "string") { continue; } - const r2 = request2[kState]; + const r2 = request3[kState]; if (!urlIsHttpHttpsScheme(r2.url) || r2.method !== "GET") { throw webidl.errors.exception({ header: "Cache.addAll", @@ -14819,8 +14805,8 @@ var require_cache = __commonJS({ } } const fetchControllers = []; - for (const request2 of requests) { - const r2 = new Request2(request2)[kState]; + for (const request3 of requests) { + const r2 = new 
Request2(request3)[kState]; if (!urlIsHttpHttpsScheme(r2.url)) { throw webidl.errors.exception({ header: "Cache.addAll", @@ -14898,16 +14884,16 @@ var require_cache = __commonJS({ }); return cacheJobPromise.promise; } - async put(request2, response) { + async put(request3, response) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 2, { header: "Cache.put" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); response = webidl.converters.Response(response); let innerRequest = null; - if (request2 instanceof Request2) { - innerRequest = request2[kState]; + if (request3 instanceof Request2) { + innerRequest = request3[kState]; } else { - innerRequest = new Request2(request2)[kState]; + innerRequest = new Request2(request3)[kState]; } if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== "GET") { throw webidl.errors.exception({ @@ -14978,20 +14964,20 @@ var require_cache = __commonJS({ }); return cacheJobPromise.promise; } - async delete(request2, options = {}) { + async delete(request3, options = {}) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.delete" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); let r2 = null; - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return false; } } else { - assert(typeof request2 === "string"); - r2 = new Request2(request2)[kState]; + assert(typeof request3 === "string"); + r2 = new Request2(request3)[kState]; } const operations = []; const operation = { @@ -15023,25 +15009,24 @@ var require_cache = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions} options * @returns {readonly Request[]} */ - async keys(request2 = void 0, options = {}) { + async keys(request3 = void 0, options = {}) { webidl.brandCheck(this, _Cache); - if (request2 !== void 0) - request2 = webidl.converters.RequestInfo(request2); + if (request3 !== void 0) request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); let r2 = null; - if (request2 !== void 0) { - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 !== void 0) { + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r2 = new Request2(request2)[kState]; + } else if (typeof request3 === "string") { + r2 = new Request2(request3)[kState]; } } const promise = createDeferredPromise(); const requests = []; - if (request2 === void 0) { + if (request3 === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { requests.push(requestResponse[0]); } @@ -15053,12 +15038,12 @@ var require_cache = __commonJS({ } queueMicrotask(() => { const requestList = []; - for (const request3 of requests) { + for (const request4 of requests) { const requestObject = new Request2("https://a"); - requestObject[kState] = request3; - requestObject[kHeaders][kHeadersList] = request3.headersList; + requestObject[kState] = request4; + requestObject[kHeaders][kHeadersList] = request4.headersList; requestObject[kHeaders][kGuard] = "immutable"; - requestObject[kRealm] = request3.client; + requestObject[kRealm] = request4.client; 
requestList.push(requestObject); } promise.resolve(Object.freeze(requestList)); @@ -15173,9 +15158,9 @@ var require_cache = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions | undefined} options * @returns {boolean} */ - #requestMatchesCachedItem(requestQuery, request2, response = null, options) { + #requestMatchesCachedItem(requestQuery, request3, response = null, options) { const queryURL = new URL(requestQuery.url); - const cachedURL = new URL(request2.url); + const cachedURL = new URL(request3.url); if (options?.ignoreSearch) { cachedURL.search = ""; queryURL.search = ""; @@ -15191,7 +15176,7 @@ var require_cache = __commonJS({ if (fieldValue === "*") { return false; } - const requestValue = request2.headersList.get(fieldValue); + const requestValue = request3.headersList.get(fieldValue); const queryValue = requestQuery.headersList.get(fieldValue); if (requestValue !== queryValue) { return false; @@ -15267,21 +15252,21 @@ var require_cachestorage = __commonJS({ webidl.illegalConstructor(); } } - async match(request2, options = {}) { + async match(request3, options = {}) { webidl.brandCheck(this, _CacheStorage); webidl.argumentLengthCheck(arguments, 1, { header: "CacheStorage.match" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.MultiCacheQueryOptions(options); if (options.cacheName != null) { if (this.#caches.has(options.cacheName)) { const cacheList = this.#caches.get(options.cacheName); const cache2 = new Cache(kConstruct, cacheList); - return await cache2.match(request2, options); + return await cache2.match(request3, options); } } else { for (const cacheList of this.#caches.values()) { const cache2 = new Cache(kConstruct, cacheList); - const response = await cache2.match(request2, options); + const response = await cache2.match(request3, options); if (response !== void 0) { return response; } @@ -15455,7 +15440,7 @@ var require_util6 = __commonJS({ throw new Error("Invalid cookie max-age"); } } - function stringify2(cookie) { + function stringify(cookie) { if (cookie.name.length === 0) { return null; } @@ -15520,7 +15505,7 @@ var require_util6 = __commonJS({ } module2.exports = { isCTLExcludingHtab, - stringify: stringify2, + stringify, getHeadersList }; } @@ -15671,7 +15656,7 @@ var require_cookies = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/cookies/index.js"(exports2, module2) { "use strict"; var { parseSetCookie } = require_parse(); - var { stringify: stringify2, getHeadersList } = require_util6(); + var { stringify, getHeadersList } = require_util6(); var { webidl } = require_webidl(); var { Headers: Headers2 } = require_headers(); function getCookies(headers) { @@ -15713,9 +15698,9 @@ var require_cookies = __commonJS({ webidl.argumentLengthCheck(arguments, 2, { header: "setCookie" }); webidl.brandCheck(headers, Headers2, { strict: false }); cookie = webidl.converters.Cookie(cookie); - const str = stringify2(cookie); + const str = stringify(cookie); if (str) { - headers.append("Set-Cookie", stringify2(cookie)); + headers.append("Set-Cookie", stringify(cookie)); } } webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ @@ -16211,15 +16196,15 @@ var require_connection = __commonJS({ channels.open = diagnosticsChannel.channel("undici:websocket:open"); channels.close = diagnosticsChannel.channel("undici:websocket:close"); channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error"); - var crypto4; + var 
crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); } catch { } function establishWebSocketConnection(url, protocols, ws, onEstablish, options) { const requestURL = url; requestURL.protocol = url.protocol === "ws:" ? "http:" : "https:"; - const request2 = makeRequest({ + const request3 = makeRequest({ urlList: [requestURL], serviceWorkers: "none", referrer: "no-referrer", @@ -16230,17 +16215,17 @@ var require_connection = __commonJS({ }); if (options.headers) { const headersList = new Headers2(options.headers)[kHeadersList]; - request2.headersList = headersList; + request3.headersList = headersList; } - const keyValue = crypto4.randomBytes(16).toString("base64"); - request2.headersList.append("sec-websocket-key", keyValue); - request2.headersList.append("sec-websocket-version", "13"); + const keyValue = crypto.randomBytes(16).toString("base64"); + request3.headersList.append("sec-websocket-key", keyValue); + request3.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { - request2.headersList.append("sec-websocket-protocol", protocol); + request3.headersList.append("sec-websocket-protocol", protocol); } const permessageDeflate = ""; const controller = fetching({ - request: request2, + request: request3, useParallelQueue: true, dispatcher: options.dispatcher ?? getGlobalDispatcher(), processResponse(response) { @@ -16261,7 +16246,7 @@ var require_connection = __commonJS({ return; } const secWSAccept = response.headersList.get("Sec-WebSocket-Accept"); - const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64"); + const digest = crypto.createHash("sha1").update(keyValue + uid).digest("base64"); if (secWSAccept !== digest) { failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header."); return; @@ -16272,7 +16257,7 @@ var require_connection = __commonJS({ return; } const secProtocol = response.headersList.get("Sec-WebSocket-Protocol"); - if (secProtocol !== null && secProtocol !== request2.headersList.get("Sec-WebSocket-Protocol")) { + if (secProtocol !== null && secProtocol !== request3.headersList.get("Sec-WebSocket-Protocol")) { failWebsocketConnection(ws, "Protocol was not set in the opening handshake."); return; } @@ -16341,9 +16326,9 @@ var require_frame = __commonJS({ "node_modules/@actions/http-client/node_modules/undici/lib/websocket/frame.js"(exports2, module2) { "use strict"; var { maxUnsigned16Bit } = require_constants5(); - var crypto4; + var crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); } catch { } var WebsocketFrameSend = class { @@ -16352,7 +16337,7 @@ var require_frame = __commonJS({ */ constructor(data) { this.frameData = data; - this.maskKey = crypto4.randomBytes(4); + this.maskKey = crypto.randomBytes(4); } createFrame(opcode) { const bodyLength = this.frameData?.byteLength ?? 
0; @@ -16930,12 +16915,12 @@ var require_websocket = __commonJS({ */ #onConnectionEstablished(response) { this[kResponse] = response; - const parser3 = new ByteParser(this); - parser3.on("drain", function onParserDrain() { + const parser4 = new ByteParser(this); + parser4.on("drain", function onParserDrain() { this.ws[kResponse].socket.resume(); }); response.socket.ws = this; - this[kByteParser] = parser3; + this[kByteParser] = parser4; this[kReadyState] = states.OPEN; const extensions = response.headersList.get("sec-websocket-extensions"); if (extensions !== null) { @@ -17079,9 +17064,9 @@ var require_undici = __commonJS({ module2.exports.buildConnector = buildConnector; module2.exports.errors = errors; function makeDispatcher(fn) { - return (url, opts, handler2) => { + return (url, opts, handler3) => { if (typeof opts === "function") { - handler2 = opts; + handler3 = opts; opts = null; } if (!url || typeof url !== "string" && typeof url !== "object" && !(url instanceof URL)) { @@ -17114,7 +17099,7 @@ var require_undici = __commonJS({ origin: url.origin, path: url.search ? `${url.pathname}${url.search}` : url.pathname, method: opts.method || (opts.body ? "PUT" : "GET") - }, handler2); + }, handler3); }; } module2.exports.setGlobalDispatcher = setGlobalDispatcher; @@ -17178,8 +17163,7 @@ var require_lib = __commonJS({ "node_modules/@actions/http-client/lib/index.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m2, k); if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { @@ -17188,8 +17172,7 @@ var require_lib = __commonJS({ } Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { @@ -17198,13 +17181,10 @@ var require_lib = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -17349,7 +17329,7 @@ var require_lib = __commonJS({ } exports2.isHttps = isHttps; var HttpClient = class { - constructor(userAgent2, handlers, requestOptions) { + constructor(userAgent3, handlers, requestOptions) { this._ignoreSslError = false; this._allowRedirects = true; this._allowRedirectDowngrade = false; @@ -17358,7 +17338,7 @@ var require_lib = __commonJS({ this._maxRetries = 1; this._keepAlive = false; this._disposed = false; - this.userAgent = userAgent2; + this.userAgent = userAgent3; this.handlers = handlers || []; this.requestOptions = requestOptions; if (requestOptions) { @@ -17483,9 +17463,9 @@ var require_lib = __commonJS({ response = yield this.requestRaw(info, data); if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; - for (const handler2 of this.handlers) { - if (handler2.canHandleAuthentication(response)) { - authenticationHandler = handler2; + for (const handler3 of this.handlers) { + if (handler3.canHandleAuthentication(response)) { + authenticationHandler = handler3; break; } } @@ -17643,22 +17623,22 @@ var require_lib = __commonJS({ } info.options.agent = this._getAgent(info.parsedUrl); if (this.handlers) { - for (const handler2 of this.handlers) { - handler2.prepareRequest(info.options); + for (const handler3 of this.handlers) { + handler3.prepareRequest(info.options); } } return info; } _mergeHeaders(headers) { if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys2(this.requestOptions.headers), lowercaseKeys2(headers || {})); + return Object.assign({}, lowercaseKeys3(this.requestOptions.headers), lowercaseKeys3(headers || {})); } - return lowercaseKeys2(headers || {}); + return lowercaseKeys3(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { let clientHeader; if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys2(this.requestOptions.headers)[header]; + clientHeader = lowercaseKeys3(this.requestOptions.headers)[header]; } return additionalHeaders[header] || clientHeader || _default; } @@ -17793,7 +17773,7 @@ var require_lib = __commonJS({ } }; exports2.HttpClient = HttpClient; - var lowercaseKeys2 = (obj) => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + var lowercaseKeys3 = (obj) => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); } }); @@ -18349,7 +18329,7 @@ var require_util8 = __commonJS({ var { InvalidArgumentError } = require_errors2(); var { Blob: Blob4 } = require("buffer"); var nodeUtil = require("util"); - var { stringify: stringify2 } = require("querystring"); + var { stringify } = require("querystring"); var { headerNameLowerCasedRecord } = require_constants6(); var { tree } = require_tree(); var [nodeMajor, nodeMinor] = process.versions.node.split(".").map((v) => Number(v)); @@ -18374,7 +18354,7 @@ var require_util8 = __commonJS({ if 
(url.includes("?") || url.includes("#")) { throw new Error('Query params cannot be passed when url already contains "?" or "#".'); } - const stringified = stringify2(queryParams); + const stringified = stringify(queryParams); if (stringified) { url += "?" + stringified; } @@ -18437,8 +18417,7 @@ var require_util8 = __commonJS({ return host.substring(1, idx2); } const idx = host.indexOf(":"); - if (idx === -1) - return host; + if (idx === -1) return host; return host.substring(0, idx); } function getServerName(host) { @@ -18511,8 +18490,7 @@ var require_util8 = __commonJS({ return tree.lookup(value) ?? value.toString("latin1").toLowerCase(); } function parseHeaders(headers, obj) { - if (obj === void 0) - obj = {}; + if (obj === void 0) obj = {}; for (let i2 = 0; i2 < headers.length; i2 += 2) { const key = headerNameToString(headers[i2]); let val = obj[key]; @@ -18566,31 +18544,31 @@ var require_util8 = __commonJS({ function isBuffer(buffer) { return buffer instanceof Uint8Array || Buffer.isBuffer(buffer); } - function validateHandler(handler2, method, upgrade) { - if (!handler2 || typeof handler2 !== "object") { + function validateHandler(handler3, method, upgrade) { + if (!handler3 || typeof handler3 !== "object") { throw new InvalidArgumentError("handler must be an object"); } - if (typeof handler2.onConnect !== "function") { + if (typeof handler3.onConnect !== "function") { throw new InvalidArgumentError("invalid onConnect method"); } - if (typeof handler2.onError !== "function") { + if (typeof handler3.onError !== "function") { throw new InvalidArgumentError("invalid onError method"); } - if (typeof handler2.onBodySent !== "function" && handler2.onBodySent !== void 0) { + if (typeof handler3.onBodySent !== "function" && handler3.onBodySent !== void 0) { throw new InvalidArgumentError("invalid onBodySent method"); } if (upgrade || method === "CONNECT") { - if (typeof handler2.onUpgrade !== "function") { + if (typeof handler3.onUpgrade !== "function") { throw new InvalidArgumentError("invalid onUpgrade method"); } } else { - if (typeof handler2.onHeaders !== "function") { + if (typeof handler3.onHeaders !== "function") { throw new InvalidArgumentError("invalid onHeaders method"); } - if (typeof handler2.onData !== "function") { + if (typeof handler3.onData !== "function") { throw new InvalidArgumentError("invalid onData method"); } - if (typeof handler2.onComplete !== "function") { + if (typeof handler3.onComplete !== "function") { throw new InvalidArgumentError("invalid onComplete method"); } } @@ -18617,14 +18595,14 @@ var require_util8 = __commonJS({ }; } function ReadableStreamFrom(iterable) { - let iterator2; + let iterator3; return new ReadableStream( { async start() { - iterator2 = iterable[Symbol.asyncIterator](); + iterator3 = iterable[Symbol.asyncIterator](); }, async pull(controller) { - const { done, value } = await iterator2.next(); + const { done, value } = await iterator3.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -18639,7 +18617,7 @@ var require_util8 = __commonJS({ return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator3.return(); }, type: "bytes" } @@ -18704,8 +18682,7 @@ var require_util8 = __commonJS({ return !headerCharRegex.test(characters); } function parseRangeHeader(range) { - if (range == null || range === "") - return { start: 0, end: null, size: null }; + if (range == null || range === "") return { start: 0, end: null, size: null }; const m2 = range ? 
range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null; return m2 ? { start: parseInt(m2[1]), @@ -18725,10 +18702,10 @@ var require_util8 = __commonJS({ } obj[kListeners] = null; } - function errorRequest(client, request2, err) { + function errorRequest(client, request3, err) { try { - request2.onError(err); - assert(request2.aborted); + request3.onError(err); + assert(request3.aborted); } catch (err2) { client.emit("error", err2); } @@ -18815,36 +18792,36 @@ var require_diagnostics = __commonJS({ const debuglog = fetchDebuglog.enabled ? fetchDebuglog : undiciDebugLog; diagnosticsChannel.channel("undici:client:beforeConnect").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host } + connectParams: { version, protocol, port, host } } = evt; debuglog( "connecting to %s using %s%s", `${host}${port ? `:${port}` : ""}`, protocol, - version2 + version ); }); diagnosticsChannel.channel("undici:client:connected").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host } + connectParams: { version, protocol, port, host } } = evt; debuglog( "connected to %s using %s%s", `${host}${port ? `:${port}` : ""}`, protocol, - version2 + version ); }); diagnosticsChannel.channel("undici:client:connectError").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host }, + connectParams: { version, protocol, port, host }, error } = evt; debuglog( "connection to %s using %s%s errored - %s", `${host}${port ? `:${port}` : ""}`, protocol, - version2, + version, error.message ); }); @@ -18893,31 +18870,31 @@ var require_diagnostics = __commonJS({ const debuglog = undiciDebugLog.enabled ? undiciDebugLog : websocketDebuglog; diagnosticsChannel.channel("undici:client:beforeConnect").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host } + connectParams: { version, protocol, port, host } } = evt; debuglog( "connecting to %s%s using %s%s", host, port ? `:${port}` : "", protocol, - version2 + version ); }); diagnosticsChannel.channel("undici:client:connected").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host } + connectParams: { version, protocol, port, host } } = evt; debuglog( "connected to %s%s using %s%s", host, port ? `:${port}` : "", protocol, - version2 + version ); }); diagnosticsChannel.channel("undici:client:connectError").subscribe((evt) => { const { - connectParams: { version: version2, protocol, port, host }, + connectParams: { version, protocol, port, host }, error } = evt; debuglog( @@ -18925,7 +18902,7 @@ var require_diagnostics = __commonJS({ host, port ? 
`:${port}` : "", protocol, - version2, + version, error.message ); }); @@ -19009,7 +18986,7 @@ var require_request3 = __commonJS({ throwOnError, expectContinue, servername - }, handler2) { + }, handler3) { if (typeof path2 !== "string") { throw new InvalidArgumentError("path must be a string"); } else if (path2[0] !== "/" && !(path2.startsWith("http://") || path2.startsWith("https://")) && method !== "CONNECT") { @@ -19111,9 +19088,9 @@ var require_request3 = __commonJS({ } else if (headers != null) { throw new InvalidArgumentError("headers must be an object or an array"); } - validateHandler(handler2, method, upgrade); + validateHandler(handler3, method, upgrade); this.servername = servername || getServerName(this.host); - this[kHandler] = handler2; + this[kHandler] = handler3; if (channels.create.hasSubscribers) { channels.create.publish({ request: this }); } @@ -19218,7 +19195,7 @@ var require_request3 = __commonJS({ return this; } }; - function processHeader(request2, key, val) { + function processHeader(request3, key, val) { if (val && (typeof val === "object" && !Array.isArray(val))) { throw new InvalidArgumentError(`invalid ${key} header`); } else if (val === void 0) { @@ -19259,19 +19236,19 @@ var require_request3 = __commonJS({ } else { val = `${val}`; } - if (request2.host === null && headerName === "host") { + if (request3.host === null && headerName === "host") { if (typeof val !== "string") { throw new InvalidArgumentError("invalid host header"); } - request2.host = val; - } else if (request2.contentLength === null && headerName === "content-length") { - request2.contentLength = parseInt(val, 10); - if (!Number.isFinite(request2.contentLength)) { + request3.host = val; + } else if (request3.contentLength === null && headerName === "content-length") { + request3.contentLength = parseInt(val, 10); + if (!Number.isFinite(request3.contentLength)) { throw new InvalidArgumentError("invalid content-length header"); } - } else if (request2.contentType === null && headerName === "content-type") { - request2.contentType = val; - request2.headers.push(key, val); + } else if (request3.contentType === null && headerName === "content-type") { + request3.contentType = val; + request3.headers.push(key, val); } else if (headerName === "transfer-encoding" || headerName === "keep-alive" || headerName === "upgrade") { throw new InvalidArgumentError(`invalid ${headerName} header`); } else if (headerName === "connection") { @@ -19280,12 +19257,12 @@ var require_request3 = __commonJS({ throw new InvalidArgumentError("invalid connection header"); } if (value === "close") { - request2.reset = true; + request3.reset = true; } } else if (headerName === "expect") { throw new NotSupportedError("expect header not supported"); } else { - request2.headers.push(key, val); + request3.headers.push(key, val); } } module2.exports = Request2; @@ -19470,20 +19447,20 @@ var require_dispatcher_base2 = __commonJS({ queueMicrotask(onDestroyed); }); } - [kInterceptedDispatch](opts, handler2) { + [kInterceptedDispatch](opts, handler3) { if (!this[kInterceptors] || this[kInterceptors].length === 0) { this[kInterceptedDispatch] = this[kDispatch]; - return this[kDispatch](opts, handler2); + return this[kDispatch](opts, handler3); } let dispatch = this[kDispatch].bind(this); for (let i2 = this[kInterceptors].length - 1; i2 >= 0; i2--) { dispatch = this[kInterceptors][i2](dispatch); } this[kInterceptedDispatch] = dispatch; - return dispatch(opts, handler2); + return dispatch(opts, handler3); } - dispatch(opts, handler2) { - 
if (!handler2 || typeof handler2 !== "object") { + dispatch(opts, handler3) { + if (!handler3 || typeof handler3 !== "object") { throw new InvalidArgumentError("handler must be an object"); } try { @@ -19496,12 +19473,12 @@ var require_dispatcher_base2 = __commonJS({ if (this[kClosed]) { throw new ClientClosedError(); } - return this[kInterceptedDispatch](opts, handler2); + return this[kInterceptedDispatch](opts, handler3); } catch (err) { - if (typeof handler2.onError !== "function") { + if (typeof handler3.onError !== "function") { throw new InvalidArgumentError("invalid onError method"); } - handler2.onError(err); + handler3.onError(err); return false; } } @@ -20598,12 +20575,10 @@ var require_data_url = __commonJS({ let lead = 0; let trail = str.length - 1; if (leading) { - while (lead < str.length && predicate(str.charCodeAt(lead))) - lead++; + while (lead < str.length && predicate(str.charCodeAt(lead))) lead++; } if (trailing) { - while (trail > 0 && predicate(str.charCodeAt(trail))) - trail--; + while (trail > 0 && predicate(str.charCodeAt(trail))) trail--; } return lead === 0 && trail === str.length - 1 ? str : str.slice(lead, trail + 1); } @@ -21095,11 +21070,11 @@ var require_util9 = __commonJS({ var { isUint8Array } = require("util/types"); var { webidl } = require_webidl2(); var supportedHashes = []; - var crypto4; + var crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); const possibleRelevantHashes = ["sha256", "sha384", "sha512"]; - supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); + supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); } catch { } function responseURL(response) { @@ -21138,11 +21113,11 @@ var require_util9 = __commonJS({ function normalizeBinaryStringToUtf8(value) { return Buffer.from(value, "binary").toString("utf8"); } - function requestCurrentURL(request2) { - return request2.urlList[request2.urlList.length - 1]; + function requestCurrentURL(request3) { + return request3.urlList[request3.urlList.length - 1]; } - function requestBadPort(request2) { - const url = requestCurrentURL(request2); + function requestBadPort(request3) { + const url = requestCurrentURL(request3); if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { return "blocked"; } @@ -21172,7 +21147,7 @@ var require_util9 = __commonJS({ } return true; } - function setRequestReferrerPolicyOnRedirect(request2, actualResponse) { + function setRequestReferrerPolicyOnRedirect(request3, actualResponse) { const { headersList } = actualResponse; const policyHeader = (headersList.get("referrer-policy", true) ?? 
"").split(","); let policy = ""; @@ -21186,7 +21161,7 @@ var require_util9 = __commonJS({ } } if (policy !== "") { - request2.referrerPolicy = policy; + request3.referrerPolicy = policy; } } function crossOriginResourcePolicyCheck() { @@ -21203,33 +21178,33 @@ var require_util9 = __commonJS({ header = httpRequest.mode; httpRequest.headersList.set("sec-fetch-mode", header, true); } - function appendRequestOriginHeader(request2) { - let serializedOrigin = request2.origin; - if (request2.responseTainting === "cors" || request2.mode === "websocket") { + function appendRequestOriginHeader(request3) { + let serializedOrigin = request3.origin; + if (request3.responseTainting === "cors" || request3.mode === "websocket") { if (serializedOrigin) { - request2.headersList.append("origin", serializedOrigin, true); + request3.headersList.append("origin", serializedOrigin, true); } - } else if (request2.method !== "GET" && request2.method !== "HEAD") { - switch (request2.referrerPolicy) { + } else if (request3.method !== "GET" && request3.method !== "HEAD") { + switch (request3.referrerPolicy) { case "no-referrer": serializedOrigin = null; break; case "no-referrer-when-downgrade": case "strict-origin": case "strict-origin-when-cross-origin": - if (request2.origin && urlHasHttpsScheme(request2.origin) && !urlHasHttpsScheme(requestCurrentURL(request2))) { + if (request3.origin && urlHasHttpsScheme(request3.origin) && !urlHasHttpsScheme(requestCurrentURL(request3))) { serializedOrigin = null; } break; case "same-origin": - if (!sameOrigin(request2, requestCurrentURL(request2))) { + if (!sameOrigin(request3, requestCurrentURL(request3))) { serializedOrigin = null; } break; default: } if (serializedOrigin) { - request2.headersList.append("origin", serializedOrigin, true); + request3.headersList.append("origin", serializedOrigin, true); } } } @@ -21284,26 +21259,26 @@ var require_util9 = __commonJS({ referrerPolicy: policyContainer.referrerPolicy }; } - function determineRequestsReferrer2(request2) { - const policy = request2.referrerPolicy; + function determineRequestsReferrer2(request3) { + const policy = request3.referrerPolicy; assert(policy); let referrerSource = null; - if (request2.referrer === "client") { + if (request3.referrer === "client") { const globalOrigin = getGlobalOrigin(); if (!globalOrigin || globalOrigin.origin === "null") { return "no-referrer"; } referrerSource = new URL(globalOrigin); - } else if (request2.referrer instanceof URL) { - referrerSource = request2.referrer; + } else if (request3.referrer instanceof URL) { + referrerSource = request3.referrer; } let referrerURL = stripURLForReferrer(referrerSource); const referrerOrigin = stripURLForReferrer(referrerSource, true); if (referrerURL.toString().length > 4096) { referrerURL = referrerOrigin; } - const areSameOrigin = sameOrigin(request2, referrerURL); - const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request2.url); + const areSameOrigin = sameOrigin(request3, referrerURL); + const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request3.url); switch (policy) { case "origin": return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true); @@ -21314,7 +21289,7 @@ var require_util9 = __commonJS({ case "origin-when-cross-origin": return areSameOrigin ? 
referrerURL : referrerOrigin; case "strict-origin-when-cross-origin": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); if (sameOrigin(referrerURL, currentURL)) { return referrerURL; } @@ -21324,7 +21299,21 @@ var require_util9 = __commonJS({ return referrerOrigin; } case "strict-origin": + // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ case "no-referrer-when-downgrade": + // eslint-disable-line + /** + * 1. If referrerURL is a potentially trustworthy URL and + * request’s current URL is not a potentially trustworthy URL, + * then return no referrer. + * 2. Return referrerOrigin + */ default: return isNonPotentiallyTrustWorthy ? "no-referrer" : referrerOrigin; } @@ -21351,14 +21340,11 @@ var require_util9 = __commonJS({ if (url.href === "about:blank" || url.href === "about:srcdoc") { return true; } - if (url.protocol === "data:") - return true; - if (url.protocol === "file:") - return true; + if (url.protocol === "data:") return true; + if (url.protocol === "file:") return true; return isOriginPotentiallyTrustworthy2(url.origin); function isOriginPotentiallyTrustworthy2(origin) { - if (origin == null || origin === "null") - return false; + if (origin == null || origin === "null") return false; const originAsURL = new URL(origin); if (originAsURL.protocol === "https:" || originAsURL.protocol === "wss:") { return true; @@ -21370,7 +21356,7 @@ var require_util9 = __commonJS({ } } function bytesMatch(bytes, metadataList) { - if (crypto4 === void 0) { + if (crypto === void 0) { return true; } const parsedMetadata = parseMetadata(metadataList); @@ -21385,7 +21371,7 @@ var require_util9 = __commonJS({ for (const item of metadata) { const algorithm = item.algo; const expectedValue = item.hash; - let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64"); + let actualValue = crypto.createHash(algorithm).update(bytes).digest("base64"); if (actualValue[actualValue.length - 1] === "=") { if (actualValue[actualValue.length - 2] === "=") { actualValue = actualValue.slice(0, -2); @@ -21464,7 +21450,7 @@ var require_util9 = __commonJS({ } return true; } - function tryUpgradeRequestToAPotentiallyTrustworthyURL(request2) { + function tryUpgradeRequestToAPotentiallyTrustworthyURL(request3) { } function sameOrigin(A2, B) { if (A2.origin === B.origin && A2.origin === "null") { @@ -22523,12 +22509,10 @@ var require_formdata_parser = __commonJS({ let lead = 0; let trail = buf.length - 1; if (leading) { - while (lead < buf.length && predicate(buf[lead])) - lead++; + while (lead < buf.length && predicate(buf[lead])) lead++; } if (trailing) { - while (trail > 0 && predicate(buf[trail])) - trail--; + while (trail > 0 && predicate(buf[trail])) trail--; } return lead === 0 && trail === buf.length - 1 ? 
buf : buf.subarray(lead, trail + 1); } @@ -22680,13 +22664,13 @@ Content-Type: ${value.type || "application/octet-stream"}\r length = Buffer.byteLength(source); } if (action != null) { - let iterator2; + let iterator3; stream = new ReadableStream({ async start() { - iterator2 = action(object)[Symbol.asyncIterator](); + iterator3 = action(object)[Symbol.asyncIterator](); }, async pull(controller) { - const { value, done } = await iterator2.next(); + const { value, done } = await iterator3.next(); if (done) { queueMicrotask(() => { controller.close(); @@ -22703,7 +22687,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r return controller.desiredSize > 0; }, async cancel(reason) { - await iterator2.return(); + await iterator3.return(); }, type: "bytes" }); @@ -23088,11 +23072,11 @@ var require_client_h1 = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request3 = client[kQueue][client[kRunningIdx]]; + if (!request3) { return -1; } - request2.onResponseStarted(); + request3.onResponseStarted(); } onHeaderField(buf) { const len = this.headers.length; @@ -23133,12 +23117,12 @@ var require_client_h1 = __commonJS({ onUpgrade(head) { const { upgrade, client, socket, headers, statusCode } = this; assert(upgrade); - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert(!socket.destroyed); assert(socket === client[kSocket]); assert(!this.paused); - assert(request2.upgrade || request2.method === "CONNECT"); + assert(request3.upgrade || request3.method === "CONNECT"); this.statusCode = null; this.statusText = ""; this.shouldKeepAlive = null; @@ -23156,7 +23140,7 @@ var require_client_h1 = __commonJS({ client[kQueue][client[kRunningIdx]++] = null; client.emit("disconnect", client[kUrl], [client], new InformationalError("upgrade")); try { - request2.onUpgrade(statusCode, headers, socket); + request3.onUpgrade(statusCode, headers, socket); } catch (err) { util.destroy(socket, err); } @@ -23167,8 +23151,8 @@ var require_client_h1 = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - if (!request2) { + const request3 = client[kQueue][client[kRunningIdx]]; + if (!request3) { return -1; } assert(!this.upgrade); @@ -23177,23 +23161,23 @@ var require_client_h1 = __commonJS({ util.destroy(socket, new SocketError("bad response", util.getSocketInfo(socket))); return -1; } - if (upgrade && !request2.upgrade) { + if (upgrade && !request3.upgrade) { util.destroy(socket, new SocketError("bad upgrade", util.getSocketInfo(socket))); return -1; } assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS); this.statusCode = statusCode; this.shouldKeepAlive = shouldKeepAlive || // Override llhttp value which does not allow keepAlive for HEAD. - request2.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; + request3.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; if (this.statusCode >= 200) { - const bodyTimeout = request2.bodyTimeout != null ? request2.bodyTimeout : client[kBodyTimeout]; + const bodyTimeout = request3.bodyTimeout != null ? 
request3.bodyTimeout : client[kBodyTimeout]; this.setTimeout(bodyTimeout, TIMEOUT_BODY); } else if (this.timeout) { if (this.timeout.refresh) { this.timeout.refresh(); } } - if (request2.method === "CONNECT") { + if (request3.method === "CONNECT") { assert(client[kRunning] === 1); this.upgrade = true; return 2; @@ -23224,11 +23208,11 @@ var require_client_h1 = __commonJS({ } else { socket[kReset] = true; } - const pause = request2.onHeaders(statusCode, headers, this.resume, statusText) === false; - if (request2.aborted) { + const pause = request3.onHeaders(statusCode, headers, this.resume, statusText) === false; + if (request3.aborted) { return -1; } - if (request2.method === "HEAD") { + if (request3.method === "HEAD") { return 1; } if (statusCode < 200) { @@ -23245,8 +23229,8 @@ var require_client_h1 = __commonJS({ if (socket.destroyed) { return -1; } - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert.strictEqual(this.timeoutType, TIMEOUT_BODY); if (this.timeout) { if (this.timeout.refresh) { @@ -23259,7 +23243,7 @@ var require_client_h1 = __commonJS({ return -1; } this.bytesRead += buf.length; - if (request2.onData(buf) === false) { + if (request3.onData(buf) === false) { return constants.ERROR.PAUSED; } } @@ -23271,8 +23255,8 @@ var require_client_h1 = __commonJS({ if (upgrade) { return; } - const request2 = client[kQueue][client[kRunningIdx]]; - assert(request2); + const request3 = client[kQueue][client[kRunningIdx]]; + assert(request3); assert(statusCode >= 100); this.statusCode = null; this.statusText = ""; @@ -23286,11 +23270,11 @@ var require_client_h1 = __commonJS({ if (statusCode < 200) { return; } - if (request2.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { + if (request3.method !== "HEAD" && contentLength && bytesRead !== parseInt(contentLength, 10)) { util.destroy(socket, new ResponseContentLengthMismatchError()); return -1; } - request2.onComplete(headers); + request3.onComplete(headers); client[kQueue][client[kRunningIdx]++] = null; if (socket[kWriting]) { assert.strictEqual(client[kRunning], 0); @@ -23309,15 +23293,15 @@ var require_client_h1 = __commonJS({ } } }; - function onParserTimeout(parser3) { - const { socket, timeoutType, client } = parser3; + function onParserTimeout(parser4) { + const { socket, timeoutType, client } = parser4; if (timeoutType === TIMEOUT_HEADERS) { if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser3.paused, "cannot be paused while waiting for headers"); + assert(!parser4.paused, "cannot be paused while waiting for headers"); util.destroy(socket, new HeadersTimeoutError()); } } else if (timeoutType === TIMEOUT_BODY) { - if (!parser3.paused) { + if (!parser4.paused) { util.destroy(socket, new BodyTimeoutError()); } } else if (timeoutType === TIMEOUT_IDLE) { @@ -23337,35 +23321,35 @@ var require_client_h1 = __commonJS({ socket[kBlocking] = false; socket[kParser] = new Parser(client, socket, llhttpInstance); addListener(socket, "error", function(err) { - const parser3 = this[kParser]; + const parser4 = this[kParser]; assert(err.code !== "ERR_TLS_CERT_ALTNAME_INVALID"); - if (err.code === "ECONNRESET" && parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + if (err.code === "ECONNRESET" && parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); return; } this[kError] = err; this[kClient][kOnError](err); }); 
addListener(socket, "readable", function() { - const parser3 = this[kParser]; - if (parser3) { - parser3.readMore(); + const parser4 = this[kParser]; + if (parser4) { + parser4.readMore(); } }); addListener(socket, "end", function() { - const parser3 = this[kParser]; - if (parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + const parser4 = this[kParser]; + if (parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); return; } util.destroy(this, new SocketError("other side closed", util.getSocketInfo(this))); }); addListener(socket, "close", function() { const client2 = this[kClient]; - const parser3 = this[kParser]; - if (parser3) { - if (!this[kError] && parser3.statusCode && !parser3.shouldKeepAlive) { - parser3.onMessageComplete(); + const parser4 = this[kParser]; + if (parser4) { + if (!this[kError] && parser4.statusCode && !parser4.shouldKeepAlive) { + parser4.onMessageComplete(); } this[kParser].destroy(); this[kParser] = null; @@ -23377,13 +23361,13 @@ var require_client_h1 = __commonJS({ assert(client2[kPending] === 0); const requests = client2[kQueue].splice(client2[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - util.errorRequest(client2, request2, err); + const request3 = requests[i2]; + util.errorRequest(client2, request3, err); } } else if (client2[kRunning] > 0 && err.code !== "UND_ERR_INFO") { - const request2 = client2[kQueue][client2[kRunningIdx]]; + const request3 = client2[kQueue][client2[kRunningIdx]]; client2[kQueue][client2[kRunningIdx]++] = null; - util.errorRequest(client2, request2, err); + util.errorRequest(client2, request3, err); } client2[kPendingIdx] = client2[kRunningIdx]; assert(client2[kRunning] === 0); @@ -23413,18 +23397,18 @@ var require_client_h1 = __commonJS({ get destroyed() { return socket.destroyed; }, - busy(request2) { + busy(request3) { if (socket[kWriting] || socket[kReset] || socket[kBlocking]) { return true; } - if (request2) { - if (client[kRunning] > 0 && !request2.idempotent) { + if (request3) { + if (client[kRunning] > 0 && !request3.idempotent) { return true; } - if (client[kRunning] > 0 && (request2.upgrade || request2.method === "CONNECT")) { + if (client[kRunning] > 0 && (request3.upgrade || request3.method === "CONNECT")) { return true; } - if (client[kRunning] > 0 && util.bodyLength(request2.body) !== 0 && (util.isStream(request2.body) || util.isAsyncIterable(request2.body) || util.isFormDataLike(request2.body))) { + if (client[kRunning] > 0 && util.bodyLength(request3.body) !== 0 && (util.isStream(request3.body) || util.isAsyncIterable(request3.body) || util.isFormDataLike(request3.body))) { return true; } } @@ -23450,8 +23434,8 @@ var require_client_h1 = __commonJS({ } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { - const request2 = client[kQueue][client[kRunningIdx]]; - const headersTimeout = request2.headersTimeout != null ? request2.headersTimeout : client[kHeadersTimeout]; + const request3 = client[kQueue][client[kRunningIdx]]; + const headersTimeout = request3.headersTimeout != null ? 
request3.headersTimeout : client[kHeadersTimeout]; socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS); } } @@ -23460,21 +23444,21 @@ var require_client_h1 = __commonJS({ function shouldSendContentLength(method) { return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } - function writeH1(client, request2) { - const { method, path: path2, host, upgrade, blocking, reset } = request2; - let { body, headers, contentLength } = request2; + function writeH1(client, request3) { + const { method, path: path2, host, upgrade, blocking, reset } = request3; + let { body, headers, contentLength } = request3; const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH"; if (util.isFormDataLike(body)) { if (!extractBody) { extractBody = require_body2().extractBody; } const [bodyStream, contentType] = extractBody(body); - if (request2.contentType == null) { + if (request3.contentType == null) { headers.push("content-type", contentType); } body = bodyStream.stream; contentLength = bodyStream.length; - } else if (util.isBlobLike(body) && request2.contentType == null && body.type) { + } else if (util.isBlobLike(body) && request3.contentType == null && body.type) { headers.push("content-type", body.type); } if (body && typeof body.read === "function") { @@ -23483,33 +23467,33 @@ var require_client_h1 = __commonJS({ const bodyLength = util.bodyLength(body); contentLength = bodyLength ?? contentLength; if (contentLength === null) { - contentLength = request2.contentLength; + contentLength = request3.contentLength; } if (contentLength === 0 && !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength !== null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request3.contentLength !== null && request3.contentLength !== contentLength) { if (client[kStrictContentLength]) { - util.errorRequest(client, request2, new RequestContentLengthMismatchError()); + util.errorRequest(client, request3, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new RequestContentLengthMismatchError()); } const socket = client[kSocket]; const abort = (err) => { - if (request2.aborted || request2.completed) { + if (request3.aborted || request3.completed) { return; } - util.errorRequest(client, request2, err || new RequestAbortedError()); + util.errorRequest(client, request3, err || new RequestAbortedError()); util.destroy(body); util.destroy(socket, new InformationalError("aborted")); }; try { - request2.onConnect(abort); + request3.onConnect(abort); } catch (err) { - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); } - if (request2.aborted) { + if (request3.aborted) { return false; } if (method === "HEAD") { @@ -23560,31 +23544,31 @@ upgrade: ${upgrade}\r } } if (channels.sendHeaders.hasSubscribers) { - channels.sendHeaders.publish({ request: request2, headers: header, socket }); + channels.sendHeaders.publish({ request: request3, headers: header, socket }); } if (!body || bodyLength === 0) { - writeBuffer({ abort, body: null, client, request: request2, socket, contentLength, header, expectsPayload }); + writeBuffer({ abort, body: null, client, request: request3, socket, contentLength, header, expectsPayload }); } else if (util.isBuffer(body)) { - writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }); + 
writeBuffer({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }); } else if (util.isBlobLike(body)) { if (typeof body.stream === "function") { - writeIterable({ abort, body: body.stream(), client, request: request2, socket, contentLength, header, expectsPayload }); + writeIterable({ abort, body: body.stream(), client, request: request3, socket, contentLength, header, expectsPayload }); } else { - writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeBlob({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }); } } else if (util.isStream(body)) { - writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeStream({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }); } else if (util.isIterable(body)) { - writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }); + writeIterable({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }); } else { assert(false); } return true; } - function writeStream({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + function writeStream({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined"); let finished = false; - const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header }); + const writer = new AsyncWriter({ abort, socket, request: request3, contentLength, client, expectsPayload, header }); const onData = function(chunk) { if (finished) { return; @@ -23650,7 +23634,7 @@ upgrade: ${upgrade}\r setImmediate(onClose); } } - async function writeBuffer({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeBuffer({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }) { try { if (!body) { if (contentLength === 0) { @@ -23670,18 +23654,18 @@ upgrade: ${upgrade}\r `, "latin1"); socket.write(body); socket.uncork(); - request2.onBodySent(body); + request3.onBodySent(body); if (!expectsPayload) { socket[kReset] = true; } } - request2.onRequestSent(); + request3.onRequestSent(); client[kResume](); } catch (err) { abort(err); } } - async function writeBlob({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeBlob({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength === body.size, "blob body must have content length"); try { if (contentLength != null && contentLength !== body.size) { @@ -23694,8 +23678,8 @@ upgrade: ${upgrade}\r `, "latin1"); socket.write(buffer); socket.uncork(); - request2.onBodySent(buffer); - request2.onRequestSent(); + request3.onBodySent(buffer); + request3.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -23704,7 +23688,7 @@ upgrade: ${upgrade}\r abort(err); } } - async function writeIterable({ abort, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeIterable({ abort, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined"); let 
callback = null; function onDrain() { @@ -23723,7 +23707,7 @@ upgrade: ${upgrade}\r } }); socket.on("close", onDrain).on("drain", onDrain); - const writer = new AsyncWriter({ abort, socket, request: request2, contentLength, client, expectsPayload, header }); + const writer = new AsyncWriter({ abort, socket, request: request3, contentLength, client, expectsPayload, header }); try { for await (const chunk of body) { if (socket[kError]) { @@ -23741,9 +23725,9 @@ upgrade: ${upgrade}\r } } var AsyncWriter = class { - constructor({ abort, socket, request: request2, contentLength, client, expectsPayload, header }) { + constructor({ abort, socket, request: request3, contentLength, client, expectsPayload, header }) { this.socket = socket; - this.request = request2; + this.request = request3; this.contentLength = contentLength; this.client = client; this.bytesWritten = 0; @@ -23753,7 +23737,7 @@ upgrade: ${upgrade}\r socket[kWriting] = true; } write(chunk) { - const { socket, request: request2, contentLength, client, bytesWritten, expectsPayload, header } = this; + const { socket, request: request3, contentLength, client, bytesWritten, expectsPayload, header } = this; if (socket[kError]) { throw socket[kError]; } @@ -23792,7 +23776,7 @@ ${len.toString(16)}\r this.bytesWritten += len; const ret = socket.write(chunk); socket.uncork(); - request2.onBodySent(chunk); + request3.onBodySent(chunk); if (!ret) { if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { if (socket[kParser].timeout.refresh) { @@ -23803,8 +23787,8 @@ ${len.toString(16)}\r return ret; } end() { - const { socket, contentLength, client, bytesWritten, expectsPayload, header, request: request2 } = this; - request2.onRequestSent(); + const { socket, contentLength, client, bytesWritten, expectsPayload, header, request: request3 } = this; + request3.onRequestSent(); socket[kWriting] = false; if (socket[kError]) { throw socket[kError]; @@ -23940,8 +23924,8 @@ var require_client_h2 = __commonJS({ assert(client2[kPending] === 0); const requests = client2[kQueue].splice(client2[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - util.errorRequest(client2, request2, err); + const request3 = requests[i2]; + util.errorRequest(client2, request3, err); } client2[kPendingIdx] = client2[kRunningIdx]; assert(client2[kRunning] === 0); @@ -24015,14 +23999,14 @@ var require_client_h2 = __commonJS({ function shouldSendContentLength(method) { return method !== "GET" && method !== "HEAD" && method !== "OPTIONS" && method !== "TRACE" && method !== "CONNECT"; } - function writeH2(client, request2) { + function writeH2(client, request3) { const session = client[kHTTP2Session]; - const { body, method, path: path2, host, upgrade, expectContinue, signal, headers: reqHeaders } = request2; + const { body, method, path: path2, host, upgrade, expectContinue, signal, headers: reqHeaders } = request3; if (upgrade) { - util.errorRequest(client, request2, new Error("Upgrade not supported for H2")); + util.errorRequest(client, request3, new Error("Upgrade not supported for H2")); return false; } - if (request2.aborted) { + if (request3.aborted) { return false; } const headers = {}; @@ -24046,8 +24030,8 @@ var require_client_h2 = __commonJS({ headers[HTTP2_HEADER_AUTHORITY] = host || `${hostname}${port ? 
`:${port}` : ""}`; headers[HTTP2_HEADER_METHOD] = method; try { - request2.onConnect((err) => { - if (request2.aborted || request2.completed) { + request3.onConnect((err) => { + if (request3.aborted || request3.completed) { return; } err = err || new RequestAbortedError(); @@ -24058,27 +24042,26 @@ var require_client_h2 = __commonJS({ session.unref(); } } - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); }); } catch (err) { - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); } if (method === "CONNECT") { session.ref(); stream = session.request(headers, { endStream: false, signal }); if (stream.id && !stream.pending) { - request2.onUpgrade(null, null, stream); + request3.onUpgrade(null, null, stream); ++session[kOpenStreams]; } else { stream.once("ready", () => { - request2.onUpgrade(null, null, stream); + request3.onUpgrade(null, null, stream); ++session[kOpenStreams]; }); } stream.once("close", () => { session[kOpenStreams] -= 1; - if (session[kOpenStreams] === 0) - session.unref(); + if (session[kOpenStreams] === 0) session.unref(); }); return true; } @@ -24090,14 +24073,14 @@ var require_client_h2 = __commonJS({ } let contentLength = util.bodyLength(body); if (contentLength == null) { - contentLength = request2.contentLength; + contentLength = request3.contentLength; } if (contentLength === 0 || !expectsPayload) { contentLength = null; } - if (shouldSendContentLength(method) && contentLength > 0 && request2.contentLength != null && request2.contentLength !== contentLength) { + if (shouldSendContentLength(method) && contentLength > 0 && request3.contentLength != null && request3.contentLength !== contentLength) { if (client[kStrictContentLength]) { - util.errorRequest(client, request2, new RequestContentLengthMismatchError()); + util.errorRequest(client, request3, new RequestContentLengthMismatchError()); return false; } process.emitWarning(new RequestContentLengthMismatchError()); @@ -24122,25 +24105,25 @@ var require_client_h2 = __commonJS({ ++session[kOpenStreams]; stream.once("response", (headers2) => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers2; - request2.onResponseStarted(); - if (request2.aborted || request2.completed) { + request3.onResponseStarted(); + if (request3.aborted || request3.completed) { const err = new RequestAbortedError(); - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); util.destroy(stream, err); return; } - if (request2.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), "") === false) { + if (request3.onHeaders(Number(statusCode), parseH2Headers(realHeaders), stream.resume.bind(stream), "") === false) { stream.pause(); } stream.on("data", (chunk) => { - if (request2.onData(chunk) === false) { + if (request3.onData(chunk) === false) { stream.pause(); } }); }); stream.once("end", () => { if (stream.state?.state == null || stream.state.state < 6) { - request2.onComplete([]); + request3.onComplete([]); return; } session[kOpenStreams] -= 1; @@ -24148,7 +24131,7 @@ var require_client_h2 = __commonJS({ session.unref(); } const err = new InformationalError("HTTP/2: stream half-closed (remote)"); - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); util.destroy(stream, err); }); stream.once("close", () => { @@ -24160,13 +24143,13 @@ var require_client_h2 = __commonJS({ stream.once("error", function(err) { if (client[kHTTP2Session] && 
!client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { session[kOpenStreams] -= 1; - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); util.destroy(stream, err); } }); stream.once("frameError", (type, code) => { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`); - util.errorRequest(client, request2, err); + util.errorRequest(client, request3, err); if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { session[kOpenStreams] -= 1; util.destroy(stream, err); @@ -24175,20 +24158,20 @@ var require_client_h2 = __commonJS({ return true; function writeBodyH2() { if (!body) { - request2.onRequestSent(); + request3.onRequestSent(); } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, "buffer body must have content length"); stream.cork(); stream.write(body); stream.uncork(); stream.end(); - request2.onBodySent(body); - request2.onRequestSent(); + request3.onBodySent(body); + request3.onRequestSent(); } else if (util.isBlobLike(body)) { if (typeof body.stream === "function") { writeIterable({ client, - request: request2, + request: request3, contentLength, h2stream: stream, expectsPayload, @@ -24200,7 +24183,7 @@ var require_client_h2 = __commonJS({ writeBlob({ body, client, - request: request2, + request: request3, contentLength, expectsPayload, h2stream: stream, @@ -24212,7 +24195,7 @@ var require_client_h2 = __commonJS({ writeStream({ body, client, - request: request2, + request: request3, contentLength, expectsPayload, socket: client[kSocket], @@ -24223,7 +24206,7 @@ var require_client_h2 = __commonJS({ writeIterable({ body, client, - request: request2, + request: request3, contentLength, expectsPayload, header: "", @@ -24235,7 +24218,7 @@ var require_client_h2 = __commonJS({ } } } - function writeStream({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + function writeStream({ h2stream, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined"); const pipe = pipeline2( body, @@ -24245,7 +24228,7 @@ var require_client_h2 = __commonJS({ util.destroy(body, err); util.destroy(h2stream, err); } else { - request2.onRequestSent(); + request3.onRequestSent(); } } ); @@ -24255,10 +24238,10 @@ var require_client_h2 = __commonJS({ util.destroy(pipe); }); function onPipeData(chunk) { - request2.onBodySent(chunk); + request3.onBodySent(chunk); } } - async function writeBlob({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeBlob({ h2stream, body, client, request: request3, socket, contentLength, header, expectsPayload }) { assert(contentLength === body.size, "blob body must have content length"); try { if (contentLength != null && contentLength !== body.size) { @@ -24268,8 +24251,8 @@ var require_client_h2 = __commonJS({ h2stream.cork(); h2stream.write(buffer); h2stream.uncork(); - request2.onBodySent(buffer); - request2.onRequestSent(); + request3.onBodySent(buffer); + request3.onRequestSent(); if (!expectsPayload) { socket[kReset] = true; } @@ -24278,7 +24261,7 @@ var require_client_h2 = __commonJS({ util.destroy(h2stream); } } - async function writeIterable({ h2stream, body, client, request: request2, socket, contentLength, header, expectsPayload }) { + async function writeIterable({ h2stream, body, client, request: 
request3, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined"); let callback = null; function onDrain() { @@ -24303,7 +24286,7 @@ var require_client_h2 = __commonJS({ throw socket[kError]; } const res = h2stream.write(chunk); - request2.onBodySent(chunk); + request3.onBodySent(chunk); if (!res) { await waitForDrain(); } @@ -24311,7 +24294,7 @@ var require_client_h2 = __commonJS({ } catch (err) { h2stream.destroy(err); } finally { - request2.onRequestSent(); + request3.onRequestSent(); h2stream.end(); h2stream.off("close", onDrain).off("drain", onDrain); } @@ -24343,17 +24326,17 @@ var require_redirect_handler = __commonJS({ } }; var RedirectHandler = class { - constructor(dispatch, maxRedirections, opts, handler2) { + constructor(dispatch, maxRedirections, opts, handler3) { if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { throw new InvalidArgumentError("maxRedirections must be a positive number"); } - util.validateHandler(handler2, opts.method, opts.upgrade); + util.validateHandler(handler3, opts.method, opts.upgrade); this.dispatch = dispatch; this.location = null; this.abort = null; this.opts = { ...opts, maxRedirections: 0 }; this.maxRedirections = maxRedirections; - this.handler = handler2; + this.handler = handler3; this.history = []; this.redirectionLimitReached = false; if (util.isStream(this.opts.body)) { @@ -24486,12 +24469,12 @@ var require_redirect_interceptor = __commonJS({ var RedirectHandler = require_redirect_handler(); function createRedirectInterceptor({ maxRedirections: defaultMaxRedirections }) { return (dispatch) => { - return function Intercept(opts, handler2) { + return function Intercept(opts, handler3) { const { maxRedirections = defaultMaxRedirections } = opts; if (!maxRedirections) { - return dispatch(opts, handler2); + return dispatch(opts, handler3); } - const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler2); + const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler3); opts = { ...opts, maxRedirections: 0 }; return dispatch(opts, redirectHandler); }; @@ -24747,12 +24730,12 @@ var require_client2 = __commonJS({ connect(this); this.once("connect", cb); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { const origin = opts.origin || this[kUrl].origin; - const request2 = new Request2(origin, opts, handler2); - this[kQueue].push(request2); + const request3 = new Request2(origin, opts, handler3); + this[kQueue].push(request3); if (this[kResuming]) { - } else if (util.bodyLength(request2.body) == null && util.isIterable(request2.body)) { + } else if (util.bodyLength(request3.body) == null && util.isIterable(request3.body)) { this[kResuming] = 1; queueMicrotask(() => resume(this)); } else { @@ -24776,8 +24759,8 @@ var require_client2 = __commonJS({ return new Promise((resolve) => { const requests = this[kQueue].splice(this[kPendingIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - util.errorRequest(this, request2, err); + const request3 = requests[i2]; + util.errorRequest(this, request3, err); } const callback = () => { if (this[kClosedResolve]) { @@ -24802,8 +24785,8 @@ var require_client2 = __commonJS({ assert(client[kPendingIdx] === client[kRunningIdx]); const requests = client[kQueue].splice(client[kRunningIdx]); for (let i2 = 0; i2 < requests.length; i2++) { - const request2 = requests[i2]; - util.errorRequest(client, 
request2, err); + const request3 = requests[i2]; + util.errorRequest(client, request3, err); } assert(client[kSize] === 0); } @@ -24908,8 +24891,8 @@ var require_client2 = __commonJS({ if (err.code === "ERR_TLS_CERT_ALTNAME_INVALID") { assert(client[kRunning] === 0); while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { - const request2 = client[kQueue][client[kPendingIdx]++]; - util.errorRequest(client, request2, err); + const request3 = client[kQueue][client[kPendingIdx]++]; + util.errorRequest(client, request3, err); } } else { onError2(client, err); @@ -24966,12 +24949,12 @@ var require_client2 = __commonJS({ if (client[kRunning] >= (getPipelining(client) || 1)) { return; } - const request2 = client[kQueue][client[kPendingIdx]]; - if (client[kUrl].protocol === "https:" && client[kServerName] !== request2.servername) { + const request3 = client[kQueue][client[kPendingIdx]]; + if (client[kUrl].protocol === "https:" && client[kServerName] !== request3.servername) { if (client[kRunning] > 0) { return; } - client[kServerName] = request2.servername; + client[kServerName] = request3.servername; client[kHTTPContext]?.destroy(new InformationalError("servername changed"), () => { client[kHTTPContext] = null; resume(client); @@ -24987,10 +24970,10 @@ var require_client2 = __commonJS({ if (client[kHTTPContext].destroyed) { return; } - if (client[kHTTPContext].busy(request2)) { + if (client[kHTTPContext].busy(request3)) { return; } - if (!request2.aborted && client[kHTTPContext].write(request2)) { + if (!request3.aborted && client[kHTTPContext].write(request3)) { client[kPendingIdx]++; } else { client[kQueue].splice(client[kPendingIdx], 1); @@ -25201,13 +25184,13 @@ var require_pool_base2 = __commonJS({ } return Promise.all(this[kClients].map((c) => c.destroy(err))); } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { const dispatcher = this[kGetDispatcher](); if (!dispatcher) { this[kNeedDrain] = true; - this[kQueue].push({ opts, handler: handler2 }); + this[kQueue].push({ opts, handler: handler3 }); this[kQueued]++; - } else if (!dispatcher.dispatch(opts, handler2)) { + } else if (!dispatcher.dispatch(opts, handler3)) { dispatcher[kNeedDrain] = true; this[kNeedDrain] = !this[kGetDispatcher](); } @@ -25357,8 +25340,7 @@ var require_balanced_pool2 = __commonJS({ var kMaxWeightPerServer = Symbol("kMaxWeightPerServer"); var kErrorPenalty = Symbol("kErrorPenalty"); function getGreatestCommonDivisor(a, b) { - if (b === 0) - return a; + if (b === 0) return a; return getGreatestCommonDivisor(b, a % b); } function defaultFactory(origin, opts) { @@ -25527,7 +25509,7 @@ var require_agent2 = __commonJS({ } return ret; } - [kDispatch](opts, handler2) { + [kDispatch](opts, handler3) { let key; if (opts.origin && (typeof opts.origin === "string" || opts.origin instanceof URL)) { key = String(opts.origin); @@ -25539,7 +25521,7 @@ var require_agent2 = __commonJS({ dispatcher = this[kFactory](opts.origin, this[kOptions]).on("drain", this[kOnDrain]).on("connect", this[kOnConnect]).on("disconnect", this[kOnDisconnect]).on("connectionError", this[kOnConnectionError]); this[kClients].set(key, dispatcher); } - return dispatcher.dispatch(opts, handler2); + return dispatcher.dispatch(opts, handler3); } async [kClose]() { const closePromises = []; @@ -25567,7 +25549,7 @@ var require_proxy_agent2 = __commonJS({ "node_modules/undici/lib/dispatcher/proxy-agent.js"(exports2, module2) { "use strict"; var { kProxy, kClose, kDestroy, kInterceptors } = 
require_symbols6(); - var { URL: URL3 } = require("url"); + var { URL: URL2 } = require("url"); var Agent = require_agent2(); var Pool = require_pool2(); var DispatcherBase = require_dispatcher_base2(); @@ -25588,7 +25570,7 @@ var require_proxy_agent2 = __commonJS({ var ProxyAgent2 = class extends DispatcherBase { constructor(opts) { super(); - if (!opts || typeof opts === "object" && !(opts instanceof URL3) && !opts.uri) { + if (!opts || typeof opts === "object" && !(opts instanceof URL2) && !opts.uri) { throw new InvalidArgumentError("Proxy uri is mandatory"); } const { clientFactory = defaultFactory } = opts; @@ -25659,11 +25641,11 @@ var require_proxy_agent2 = __commonJS({ } }); } - dispatch(opts, handler2) { + dispatch(opts, handler3) { const headers = buildHeaders(opts.headers); throwIfProxyAuthIsSent(headers); if (headers && !("host" in headers) && !("Host" in headers)) { - const { host } = new URL3(opts.origin); + const { host } = new URL2(opts.origin); headers.host = host; } return this[kAgent].dispatch( @@ -25671,7 +25653,7 @@ var require_proxy_agent2 = __commonJS({ ...opts, headers }, - handler2 + handler3 ); } /** @@ -25680,11 +25662,11 @@ var require_proxy_agent2 = __commonJS({ */ #getUrl(opts) { if (typeof opts === "string") { - return new URL3(opts); - } else if (opts instanceof URL3) { + return new URL2(opts); + } else if (opts instanceof URL2) { return opts; } else { - return new URL3(opts.uri); + return new URL2(opts.uri); } } async [kClose]() { @@ -25809,8 +25791,7 @@ var require_retry_handler = __commonJS({ } } onBodySent(chunk) { - if (this.handler.onBodySent) - return this.handler.onBodySent(chunk); + if (this.handler.onBodySent) return this.handler.onBodySent(chunk); } static [kRetryHandlerDefaultRetry](err, { state, opts }, cb) { const { statusCode, code, headers } = err; @@ -26013,13 +25994,13 @@ var require_retry_agent = __commonJS({ this.#agent = agent; this.#options = options; } - dispatch(opts, handler2) { + dispatch(opts, handler3) { const retry = new RetryHandler({ ...opts, retryOptions: this.#options }, { dispatch: this.#agent.dispatch.bind(this.#agent), - handler: handler2 + handler: handler3 }); return this.#agent.dispatch(opts, retry); } @@ -26049,7 +26030,7 @@ var require_readable2 = __commonJS({ var kAbort = Symbol("kAbort"); var kContentType = Symbol("kContentType"); var kContentLength = Symbol("kContentLength"); - var noop3 = () => { + var noop4 = () => { }; var BodyReadable = class extends Readable { constructor({ @@ -26173,7 +26154,7 @@ var require_readable2 = __commonJS({ } else { resolve(null); } - }).on("error", noop3).on("data", function(chunk) { + }).on("error", noop4).on("data", function(chunk) { limit -= chunk.length; if (limit <= 0) { this.destroy(); @@ -26547,10 +26528,10 @@ var require_api_request2 = __commonJS({ } } }; - function request2(opts, callback) { + function request3(opts, callback) { if (callback === void 0) { return new Promise((resolve, reject) => { - request2.call(this, opts, (err, data) => { + request3.call(this, opts, (err, data) => { return err ? 
reject(err) : resolve(data); }); }); @@ -26565,7 +26546,7 @@ var require_api_request2 = __commonJS({ queueMicrotask(() => callback(err, { opaque })); } } - module2.exports = request2; + module2.exports = request3; module2.exports.RequestHandler = RequestHandler; } }); @@ -26795,11 +26776,11 @@ var require_api_pipeline2 = __commonJS({ } }; var PipelineHandler = class extends AsyncResource { - constructor(opts, handler2) { + constructor(opts, handler3) { if (!opts || typeof opts !== "object") { throw new InvalidArgumentError("invalid opts"); } - if (typeof handler2 !== "function") { + if (typeof handler3 !== "function") { throw new InvalidArgumentError("invalid handler"); } const { signal, method, opaque, onInfo, responseHeaders } = opts; @@ -26815,7 +26796,7 @@ var require_api_pipeline2 = __commonJS({ super("UNDICI_PIPELINE"); this.opaque = opaque || null; this.responseHeaders = responseHeaders || null; - this.handler = handler2; + this.handler = handler3; this.abort = null; this.context = null; this.onInfo = onInfo || null; @@ -26870,7 +26851,7 @@ var require_api_pipeline2 = __commonJS({ this.context = context2; } onHeaders(statusCode, rawHeaders, resume) { - const { opaque, handler: handler2, context: context2 } = this; + const { opaque, handler: handler3, context: context2 } = this; if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); @@ -26883,7 +26864,7 @@ var require_api_pipeline2 = __commonJS({ try { this.handler = null; const headers = this.responseHeaders === "raw" ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders); - body = this.runInAsyncScope(handler2, null, { + body = this.runInAsyncScope(handler3, null, { statusCode, headers, opaque, @@ -26930,9 +26911,9 @@ var require_api_pipeline2 = __commonJS({ util.destroy(ret, err); } }; - function pipeline2(opts, handler2) { + function pipeline2(opts, handler3) { try { - const pipelineHandler = new PipelineHandler(opts, handler2); + const pipelineHandler = new PipelineHandler(opts, handler3); this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler); return pipelineHandler.ret; } catch (err) { @@ -27285,7 +27266,7 @@ var require_mock_utils2 = __commonJS({ const headersMatch = matchHeaders(mockDispatch2, headers); return pathMatch && methodMatch && bodyMatch && headersMatch; } - function getResponseData2(data) { + function getResponseData3(data) { if (Buffer.isBuffer(data)) { return data; } else if (typeof data === "object") { @@ -27371,7 +27352,7 @@ var require_mock_utils2 = __commonJS({ } return Buffer.concat(buffers).toString("utf8"); } - function mockDispatch(opts, handler2) { + function mockDispatch(opts, handler3) { const key = buildKey(opts); const mockDispatch2 = getMockDispatch(this[kDispatches], key); mockDispatch2.timesInvoked++; @@ -27384,7 +27365,7 @@ var require_mock_utils2 = __commonJS({ mockDispatch2.pending = timesInvoked < times; if (error !== null) { deleteMockDispatch(this[kDispatches], key); - handler2.onError(error); + handler3.onError(error); return true; } if (typeof delay2 === "number" && delay2 > 0) { @@ -27401,13 +27382,13 @@ var require_mock_utils2 = __commonJS({ body.then((newData) => handleReply(mockDispatches, newData)); return; } - const responseData = getResponseData2(body); + const responseData = getResponseData3(body); const responseHeaders = generateKeyValues(headers); const responseTrailers = generateKeyValues(trailers); - handler2.onConnect?.((err) => 
handler2.onError(err), null); - handler2.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode)); - handler2.onData?.(Buffer.from(responseData)); - handler2.onComplete?.(responseTrailers); + handler3.onConnect?.((err) => handler3.onError(err), null); + handler3.onHeaders?.(statusCode, responseHeaders, resume, getStatusText(statusCode)); + handler3.onData?.(Buffer.from(responseData)); + handler3.onComplete?.(responseTrailers); deleteMockDispatch(mockDispatches, key); } function resume() { @@ -27418,10 +27399,10 @@ var require_mock_utils2 = __commonJS({ const agent = this[kMockAgent]; const origin = this[kOrigin]; const originalDispatch = this[kOriginalDispatch]; - return function dispatch(opts, handler2) { + return function dispatch(opts, handler3) { if (agent.isMockActive) { try { - mockDispatch.call(this, opts, handler2); + mockDispatch.call(this, opts, handler3); } catch (error) { if (error instanceof MockNotMatchedError) { const netConnect = agent[kGetNetConnect](); @@ -27429,7 +27410,7 @@ var require_mock_utils2 = __commonJS({ throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`); } if (checkNetConnect(netConnect, origin)) { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler3); } else { throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`); } @@ -27438,7 +27419,7 @@ var require_mock_utils2 = __commonJS({ } } } else { - originalDispatch.call(this, opts, handler2); + originalDispatch.call(this, opts, handler3); } }; } @@ -27458,7 +27439,7 @@ var require_mock_utils2 = __commonJS({ } } module2.exports = { - getResponseData: getResponseData2, + getResponseData: getResponseData3, getMockDispatch, addMockDispatch, deleteMockDispatch, @@ -27481,7 +27462,7 @@ var require_mock_utils2 = __commonJS({ var require_mock_interceptor2 = __commonJS({ "node_modules/undici/lib/mock/mock-interceptor.js"(exports2, module2) { "use strict"; - var { getResponseData: getResponseData2, buildKey, addMockDispatch } = require_mock_utils2(); + var { getResponseData: getResponseData3, buildKey, addMockDispatch } = require_mock_utils2(); var { kDispatches, kDispatchKey, @@ -27553,7 +27534,7 @@ var require_mock_interceptor2 = __commonJS({ this[kContentLength] = false; } createMockScopeDispatchData({ statusCode, data, responseOptions }) { - const responseData = getResponseData2(data); + const responseData = getResponseData3(data); const contentLength = this[kContentLength] ? 
{ "content-length": responseData.length } : {}; const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers }; const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers }; @@ -27862,9 +27843,9 @@ var require_mock_agent2 = __commonJS({ } return dispatcher; } - dispatch(opts, handler2) { + dispatch(opts, handler3) { this.get(opts.origin); - return this[kAgent].dispatch(opts, handler2); + return this[kAgent].dispatch(opts, handler3); } async close() { await this[kAgent].close(); @@ -27983,8 +27964,8 @@ var require_decorator_handler = __commonJS({ "node_modules/undici/lib/handler/decorator-handler.js"(exports2, module2) { "use strict"; module2.exports = class DecoratorHandler { - constructor(handler2) { - this.handler = handler2; + constructor(handler3) { + this.handler = handler3; } onConnect(...args) { return this.handler.onConnect(...args); @@ -28019,16 +28000,16 @@ var require_redirect = __commonJS({ module2.exports = (opts) => { const globalMaxRedirections = opts?.maxRedirections; return (dispatch) => { - return function redirectInterceptor(opts2, handler2) { + return function redirectInterceptor(opts2, handler3) { const { maxRedirections = globalMaxRedirections, ...baseOpts } = opts2; if (!maxRedirections) { - return dispatch(opts2, handler2); + return dispatch(opts2, handler3); } const redirectHandler = new RedirectHandler( dispatch, maxRedirections, opts2, - handler2 + handler3 ); return dispatch(baseOpts, redirectHandler); }; @@ -28044,13 +28025,13 @@ var require_retry = __commonJS({ var RetryHandler = require_retry_handler(); module2.exports = (globalOpts) => { return (dispatch) => { - return function retryInterceptor(opts, handler2) { + return function retryInterceptor(opts, handler3) { return dispatch( opts, new RetryHandler( { ...opts, retryOptions: { ...globalOpts, ...opts.retryOptions } }, { - handler: handler2, + handler: handler3, dispatch } ) @@ -28084,10 +28065,8 @@ var require_headers2 = __commonJS({ function headerValueNormalize(potentialValue) { let i2 = 0; let j = potentialValue.length; - while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) - --j; - while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i2))) - ++i2; + while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j; + while (j > i2 && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i2))) ++i2; return i2 === 0 && j === potentialValue.length ? 
potentialValue : potentialValue.substring(i2, j); } function fill(headers, object) { @@ -28209,8 +28188,7 @@ var require_headers2 = __commonJS({ */ delete(name, isLowerCase) { this[kHeadersSortedMap] = null; - if (!isLowerCase) - name = name.toLowerCase(); + if (!isLowerCase) name = name.toLowerCase(); if (name === "set-cookie") { this.cookies = null; } @@ -28247,12 +28225,12 @@ var require_headers2 = __commonJS({ if (size === 0) { return array; } - const iterator2 = this[kHeadersMap][Symbol.iterator](); - const firstValue = iterator2.next().value; + const iterator3 = this[kHeadersMap][Symbol.iterator](); + const firstValue = iterator3.next().value; array[0] = [firstValue[0], firstValue[1].value]; assert(firstValue[1].value !== null); for (let i2 = 1, j = 0, right = 0, left = 0, pivot = 0, x2, value; i2 < size; ++i2) { - value = iterator2.next().value; + value = iterator3.next().value; x2 = array[i2] = [value[0], value[1].value]; assert(x2[1] !== null); left = 0; @@ -28273,7 +28251,7 @@ var require_headers2 = __commonJS({ array[left] = x2; } } - if (!iterator2.next().done) { + if (!iterator3.next().done) { throw new TypeError("Unreachable"); } return array; @@ -28437,9 +28415,9 @@ var require_headers2 = __commonJS({ }); webidl.converters.HeadersInit = function(V) { if (webidl.util.Type(V) === "Object") { - const iterator2 = Reflect.get(V, Symbol.iterator); - if (typeof iterator2 === "function") { - return webidl.converters["sequence>"](V, iterator2.bind(V)); + const iterator3 = Reflect.get(V, Symbol.iterator); + if (typeof iterator3 === "function") { + return webidl.converters["sequence>"](V, iterator3.bind(V)); } return webidl.converters["record"](V); } @@ -28956,7 +28934,7 @@ var require_request4 = __commonJS({ policyContainer: makePolicyContainer() } }; - let request2 = null; + let request3 = null; let fallbackMode = null; const baseUrl = this[kRealm].settingsObject.baseUrl; let signal = null; @@ -28973,18 +28951,18 @@ var require_request4 = __commonJS({ "Request cannot be constructed from a URL that includes credentials: " + input ); } - request2 = makeRequest({ urlList: [parsedURL] }); + request3 = makeRequest({ urlList: [parsedURL] }); fallbackMode = "cors"; } else { this[kDispatcher] = init.dispatcher || input[kDispatcher]; assert(input instanceof _Request); - request2 = input[kState]; + request3 = input[kState]; signal = input[kSignal]; } const origin = this[kRealm].settingsObject.origin; let window2 = "client"; - if (request2.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request2.window, origin)) { - window2 = request2.window; + if (request3.window?.constructor?.name === "EnvironmentSettingsObject" && sameOrigin(request3.window, origin)) { + window2 = request3.window; } if (init.window != null) { throw new TypeError(`'window' option '${window2}' must be null`); @@ -28992,66 +28970,66 @@ var require_request4 = __commonJS({ if ("window" in init) { window2 = "no-window"; } - request2 = makeRequest({ + request3 = makeRequest({ // URL request’s URL. // undici implementation note: this is set as the first item in request's urlList in makeRequest // method request’s method. - method: request2.method, + method: request3.method, // header list A copy of request’s header list. // undici implementation note: headersList is cloned in makeRequest - headersList: request2.headersList, + headersList: request3.headersList, // unsafe-request flag Set. 
- unsafeRequest: request2.unsafeRequest, + unsafeRequest: request3.unsafeRequest, // client This’s relevant settings object. client: this[kRealm].settingsObject, // window window. window: window2, // priority request’s priority. - priority: request2.priority, + priority: request3.priority, // origin request’s origin. The propagation of the origin is only significant for navigation requests // being handled by a service worker. In this scenario a request can have an origin that is different // from the current client. - origin: request2.origin, + origin: request3.origin, // referrer request’s referrer. - referrer: request2.referrer, + referrer: request3.referrer, // referrer policy request’s referrer policy. - referrerPolicy: request2.referrerPolicy, + referrerPolicy: request3.referrerPolicy, // mode request’s mode. - mode: request2.mode, + mode: request3.mode, // credentials mode request’s credentials mode. - credentials: request2.credentials, + credentials: request3.credentials, // cache mode request’s cache mode. - cache: request2.cache, + cache: request3.cache, // redirect mode request’s redirect mode. - redirect: request2.redirect, + redirect: request3.redirect, // integrity metadata request’s integrity metadata. - integrity: request2.integrity, + integrity: request3.integrity, // keepalive request’s keepalive. - keepalive: request2.keepalive, + keepalive: request3.keepalive, // reload-navigation flag request’s reload-navigation flag. - reloadNavigation: request2.reloadNavigation, + reloadNavigation: request3.reloadNavigation, // history-navigation flag request’s history-navigation flag. - historyNavigation: request2.historyNavigation, + historyNavigation: request3.historyNavigation, // URL list A clone of request’s URL list. - urlList: [...request2.urlList] + urlList: [...request3.urlList] }); const initHasKey = Object.keys(init).length !== 0; if (initHasKey) { - if (request2.mode === "navigate") { - request2.mode = "same-origin"; + if (request3.mode === "navigate") { + request3.mode = "same-origin"; } - request2.reloadNavigation = false; - request2.historyNavigation = false; - request2.origin = "client"; - request2.referrer = "client"; - request2.referrerPolicy = ""; - request2.url = request2.urlList[request2.urlList.length - 1]; - request2.urlList = [request2.url]; + request3.reloadNavigation = false; + request3.historyNavigation = false; + request3.origin = "client"; + request3.referrer = "client"; + request3.referrerPolicy = ""; + request3.url = request3.urlList[request3.urlList.length - 1]; + request3.urlList = [request3.url]; } if (init.referrer !== void 0) { const referrer = init.referrer; if (referrer === "") { - request2.referrer = "no-referrer"; + request3.referrer = "no-referrer"; } else { let parsedReferrer; try { @@ -29060,14 +29038,14 @@ var require_request4 = __commonJS({ throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }); } if (parsedReferrer.protocol === "about:" && parsedReferrer.hostname === "client" || origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) { - request2.referrer = "client"; + request3.referrer = "client"; } else { - request2.referrer = parsedReferrer; + request3.referrer = parsedReferrer; } } } if (init.referrerPolicy !== void 0) { - request2.referrerPolicy = init.referrerPolicy; + request3.referrerPolicy = init.referrerPolicy; } let mode; if (init.mode !== void 0) { @@ -29082,33 +29060,33 @@ var require_request4 = __commonJS({ }); } if (mode != null) { - request2.mode = mode; + request3.mode = 
mode; } if (init.credentials !== void 0) { - request2.credentials = init.credentials; + request3.credentials = init.credentials; } if (init.cache !== void 0) { - request2.cache = init.cache; + request3.cache = init.cache; } - if (request2.cache === "only-if-cached" && request2.mode !== "same-origin") { + if (request3.cache === "only-if-cached" && request3.mode !== "same-origin") { throw new TypeError( "'only-if-cached' can be set only with 'same-origin' mode" ); } if (init.redirect !== void 0) { - request2.redirect = init.redirect; + request3.redirect = init.redirect; } if (init.integrity != null) { - request2.integrity = String(init.integrity); + request3.integrity = String(init.integrity); } if (init.keepalive !== void 0) { - request2.keepalive = Boolean(init.keepalive); + request3.keepalive = Boolean(init.keepalive); } if (init.method !== void 0) { let method = init.method; const mayBeNormalized = normalizeMethodRecord[method]; if (mayBeNormalized !== void 0) { - request2.method = mayBeNormalized; + request3.method = mayBeNormalized; } else { if (!isValidHTTPToken(method)) { throw new TypeError(`'${method}' is not a valid HTTP method.`); @@ -29117,9 +29095,9 @@ var require_request4 = __commonJS({ throw new TypeError(`'${method}' HTTP method is unsupported.`); } method = normalizeMethod(method); - request2.method = method; + request3.method = method; } - if (!patchMethodWarning && request2.method === "patch") { + if (!patchMethodWarning && request3.method === "patch") { process.emitWarning("Using `patch` is highly likely to result in a `405 Method Not Allowed`. `PATCH` is much more likely to succeed.", { code: "UNDICI-FETCH-patch" }); @@ -29129,7 +29107,7 @@ var require_request4 = __commonJS({ if (init.signal !== void 0) { signal = init.signal; } - this[kState] = request2; + this[kState] = request3; const ac = new AbortController(); this[kSignal] = ac.signal; this[kSignal][kRealm] = this[kRealm]; @@ -29165,13 +29143,13 @@ var require_request4 = __commonJS({ } } this[kHeaders] = new Headers2(kConstruct); - this[kHeaders][kHeadersList] = request2.headersList; + this[kHeaders][kHeadersList] = request3.headersList; this[kHeaders][kGuard] = "request"; this[kHeaders][kRealm] = this[kRealm]; if (mode === "no-cors") { - if (!corsSafeListedMethodsSet.has(request2.method)) { + if (!corsSafeListedMethodsSet.has(request3.method)) { throw new TypeError( - `'${request2.method} is unsupported in no-cors mode.` + `'${request3.method} is unsupported in no-cors mode.` ); } this[kHeaders][kGuard] = "request-no-cors"; @@ -29190,14 +29168,14 @@ var require_request4 = __commonJS({ } } const inputBody = input instanceof _Request ? 
input[kState].body : null; - if ((init.body != null || inputBody != null) && (request2.method === "GET" || request2.method === "HEAD")) { + if ((init.body != null || inputBody != null) && (request3.method === "GET" || request3.method === "HEAD")) { throw new TypeError("Request with GET/HEAD method cannot have body."); } let initBody = null; if (init.body != null) { const [extractedBody, contentType] = extractBody( init.body, - request2.keepalive + request3.keepalive ); initBody = extractedBody; if (contentType && !this[kHeaders][kHeadersList].contains("content-type", true)) { @@ -29209,12 +29187,12 @@ var require_request4 = __commonJS({ if (initBody != null && init.duplex == null) { throw new TypeError("RequestInit: duplex option is required when sending a body."); } - if (request2.mode !== "same-origin" && request2.mode !== "cors") { + if (request3.mode !== "same-origin" && request3.mode !== "cors") { throw new TypeError( 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' ); } - request2.useCORSPreflightFlag = true; + request3.useCORSPreflightFlag = true; } let finalBody = inputOrInitBody; if (initBody == null && inputBody != null) { @@ -29397,7 +29375,7 @@ var require_request4 = __commonJS({ }; mixinBody(Request2); function makeRequest(init) { - const request2 = { + const request3 = { method: "GET", localURLsOnly: false, unsafeRequest: false, @@ -29436,27 +29414,27 @@ var require_request4 = __commonJS({ ...init, headersList: init.headersList ? new HeadersList(init.headersList) : new HeadersList() }; - request2.url = request2.urlList[0]; - return request2; + request3.url = request3.urlList[0]; + return request3; } - function cloneRequest(request2) { - const newRequest = makeRequest({ ...request2, body: null }); - if (request2.body != null) { - newRequest.body = cloneBody(request2.body); + function cloneRequest(request3) { + const newRequest = makeRequest({ ...request3, body: null }); + if (request3.body != null) { + newRequest.body = cloneBody(request3.body); } return newRequest; } function fromInnerRequest(innerRequest, signal, guard, realm) { - const request2 = new Request2(kConstruct); - request2[kState] = innerRequest; - request2[kRealm] = realm; - request2[kSignal] = signal; - request2[kSignal][kRealm] = realm; - request2[kHeaders] = new Headers2(kConstruct); - request2[kHeaders][kHeadersList] = innerRequest.headersList; - request2[kHeaders][kGuard] = guard; - request2[kHeaders][kRealm] = realm; - return request2; + const request3 = new Request2(kConstruct); + request3[kState] = innerRequest; + request3[kRealm] = realm; + request3[kSignal] = signal; + request3[kSignal][kRealm] = realm; + request3[kHeaders] = new Headers2(kConstruct); + request3[kHeaders][kHeadersList] = innerRequest.headersList; + request3[kHeaders][kGuard] = guard; + request3[kHeaders][kRealm] = realm; + return request3; } Object.defineProperties(Request2.prototype, { method: kEnumerableProperty, @@ -29693,14 +29671,14 @@ var require_fetch2 = __commonJS({ p.reject(e2); return p.promise; } - const request2 = requestObject[kState]; + const request3 = requestObject[kState]; if (requestObject.signal.aborted) { - abortFetch(p, request2, null, requestObject.signal.reason); + abortFetch(p, request3, null, requestObject.signal.reason); return p.promise; } - const globalObject = request2.client.globalObject; + const globalObject = request3.client.globalObject; if (globalObject?.constructor?.name === "ServiceWorkerGlobalScope") { - request2.serviceWorkers = "none"; + request3.serviceWorkers = 
"none"; } let responseObject = null; const relevantRealm = null; @@ -29712,7 +29690,7 @@ var require_fetch2 = __commonJS({ locallyAborted = true; assert(controller != null); controller.abort(requestObject.signal.reason); - abortFetch(p, request2, responseObject, requestObject.signal.reason); + abortFetch(p, request3, responseObject, requestObject.signal.reason); } ); const handleFetchDone = (response) => finalizeAndReportTiming(response, "fetch"); @@ -29721,7 +29699,7 @@ var require_fetch2 = __commonJS({ return; } if (response.aborted) { - abortFetch(p, request2, responseObject, controller.serializedAbortReason); + abortFetch(p, request3, responseObject, controller.serializedAbortReason); return; } if (response.type === "error") { @@ -29732,7 +29710,7 @@ var require_fetch2 = __commonJS({ p.resolve(responseObject); }; controller = fetching({ - request: request2, + request: request3, processResponseEndOfBody: handleFetchDone, processResponse, dispatcher: requestObject[kDispatcher] @@ -29774,10 +29752,10 @@ var require_fetch2 = __commonJS({ } var markResourceTiming = nodeMajor > 18 || nodeMajor === 18 && nodeMinor >= 2 ? performance.markResourceTiming : () => { }; - function abortFetch(p, request2, responseObject, error) { + function abortFetch(p, request3, responseObject, error) { p.reject(error); - if (request2.body != null && isReadable(request2.body?.stream)) { - request2.body.stream.cancel(error).catch((err) => { + if (request3.body != null && isReadable(request3.body?.stream)) { + request3.body.stream.cancel(error).catch((err) => { if (err.code === "ERR_INVALID_STATE") { return; } @@ -29798,7 +29776,7 @@ var require_fetch2 = __commonJS({ } } function fetching({ - request: request2, + request: request3, processRequestBodyChunkLength, processRequestEndOfBody, processResponse, @@ -29811,9 +29789,9 @@ var require_fetch2 = __commonJS({ assert(dispatcher); let taskDestination = null; let crossOriginIsolatedCapability = false; - if (request2.client != null) { - taskDestination = request2.client.globalObject; - crossOriginIsolatedCapability = request2.client.crossOriginIsolatedCapability; + if (request3.client != null) { + taskDestination = request3.client.globalObject; + crossOriginIsolatedCapability = request3.client.crossOriginIsolatedCapability; } const currentTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability); const timingInfo = createOpaqueTimingInfo({ @@ -29821,7 +29799,7 @@ var require_fetch2 = __commonJS({ }); const fetchParams = { controller: new Fetch(dispatcher), - request: request2, + request: request3, timingInfo, processRequestBodyChunkLength, processRequestEndOfBody, @@ -29831,32 +29809,32 @@ var require_fetch2 = __commonJS({ taskDestination, crossOriginIsolatedCapability }; - assert(!request2.body || request2.body.stream); - if (request2.window === "client") { - request2.window = request2.client?.globalObject?.constructor?.name === "Window" ? request2.client : "no-window"; + assert(!request3.body || request3.body.stream); + if (request3.window === "client") { + request3.window = request3.client?.globalObject?.constructor?.name === "Window" ? 
request3.client : "no-window"; } - if (request2.origin === "client") { - request2.origin = request2.client?.origin; + if (request3.origin === "client") { + request3.origin = request3.client?.origin; } - if (request2.policyContainer === "client") { - if (request2.client != null) { - request2.policyContainer = clonePolicyContainer( - request2.client.policyContainer + if (request3.policyContainer === "client") { + if (request3.client != null) { + request3.policyContainer = clonePolicyContainer( + request3.client.policyContainer ); } else { - request2.policyContainer = makePolicyContainer(); + request3.policyContainer = makePolicyContainer(); } } - if (!request2.headersList.contains("accept", true)) { + if (!request3.headersList.contains("accept", true)) { const value = "*/*"; - request2.headersList.append("accept", value, true); + request3.headersList.append("accept", value, true); } - if (!request2.headersList.contains("accept-language", true)) { - request2.headersList.append("accept-language", "*", true); + if (!request3.headersList.contains("accept-language", true)) { + request3.headersList.append("accept-language", "*", true); } - if (request2.priority === null) { + if (request3.priority === null) { } - if (subresourceSet.has(request2.destination)) { + if (subresourceSet.has(request3.destination)) { } mainFetch(fetchParams).catch((err) => { fetchParams.controller.terminate(err); @@ -29864,50 +29842,50 @@ var require_fetch2 = __commonJS({ return fetchParams.controller; } async function mainFetch(fetchParams, recursive = false) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; - if (request2.localURLsOnly && !urlIsLocal(requestCurrentURL(request2))) { + if (request3.localURLsOnly && !urlIsLocal(requestCurrentURL(request3))) { response = makeNetworkError("local URLs only"); } - tryUpgradeRequestToAPotentiallyTrustworthyURL(request2); - if (requestBadPort(request2) === "blocked") { + tryUpgradeRequestToAPotentiallyTrustworthyURL(request3); + if (requestBadPort(request3) === "blocked") { response = makeNetworkError("bad port"); } - if (request2.referrerPolicy === "") { - request2.referrerPolicy = request2.policyContainer.referrerPolicy; + if (request3.referrerPolicy === "") { + request3.referrerPolicy = request3.policyContainer.referrerPolicy; } - if (request2.referrer !== "no-referrer") { - request2.referrer = determineRequestsReferrer2(request2); + if (request3.referrer !== "no-referrer") { + request3.referrer = determineRequestsReferrer2(request3); } if (response === null) { response = await (async () => { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); if ( // - request’s current URL’s origin is same origin with request’s origin, // and request’s response tainting is "basic" - sameOrigin(currentURL, request2.url) && request2.responseTainting === "basic" || // request’s current URL’s scheme is "data" + sameOrigin(currentURL, request3.url) && request3.responseTainting === "basic" || // request’s current URL’s scheme is "data" currentURL.protocol === "data:" || // - request’s mode is "navigate" or "websocket" - (request2.mode === "navigate" || request2.mode === "websocket") + (request3.mode === "navigate" || request3.mode === "websocket") ) { - request2.responseTainting = "basic"; + request3.responseTainting = "basic"; return await schemeFetch(fetchParams); } - if (request2.mode === "same-origin") { + if (request3.mode === "same-origin") { return makeNetworkError('request mode cannot be 
"same-origin"'); } - if (request2.mode === "no-cors") { - if (request2.redirect !== "follow") { + if (request3.mode === "no-cors") { + if (request3.redirect !== "follow") { return makeNetworkError( 'redirect mode cannot be "follow" for "no-cors" request' ); } - request2.responseTainting = "opaque"; + request3.responseTainting = "opaque"; return await schemeFetch(fetchParams); } - if (!urlIsHttpHttpsScheme(requestCurrentURL(request2))) { + if (!urlIsHttpHttpsScheme(requestCurrentURL(request3))) { return makeNetworkError("URL scheme must be a HTTP(S) scheme"); } - request2.responseTainting = "cors"; + request3.responseTainting = "cors"; return await httpFetch(fetchParams); })(); } @@ -29915,13 +29893,13 @@ var require_fetch2 = __commonJS({ return response; } if (response.status !== 0 && !response.internalResponse) { - if (request2.responseTainting === "cors") { + if (request3.responseTainting === "cors") { } - if (request2.responseTainting === "basic") { + if (request3.responseTainting === "basic") { response = filterResponse(response, "basic"); - } else if (request2.responseTainting === "cors") { + } else if (request3.responseTainting === "cors") { response = filterResponse(response, "cors"); - } else if (request2.responseTainting === "opaque") { + } else if (request3.responseTainting === "opaque") { response = filterResponse(response, "opaque"); } else { assert(false); @@ -29929,26 +29907,26 @@ var require_fetch2 = __commonJS({ } let internalResponse = response.status === 0 ? response : response.internalResponse; if (internalResponse.urlList.length === 0) { - internalResponse.urlList.push(...request2.urlList); + internalResponse.urlList.push(...request3.urlList); } - if (!request2.timingAllowFailed) { + if (!request3.timingAllowFailed) { response.timingAllowPassed = true; } - if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request2.headers.contains("range", true)) { + if (response.type === "opaque" && internalResponse.status === 206 && internalResponse.rangeRequested && !request3.headers.contains("range", true)) { response = internalResponse = makeNetworkError(); } - if (response.status !== 0 && (request2.method === "HEAD" || request2.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { + if (response.status !== 0 && (request3.method === "HEAD" || request3.method === "CONNECT" || nullBodyStatus.includes(internalResponse.status))) { internalResponse.body = null; fetchParams.controller.dump = true; } - if (request2.integrity) { + if (request3.integrity) { const processBodyError = (reason) => fetchFinale(fetchParams, makeNetworkError(reason)); - if (request2.responseTainting === "opaque" || response.body == null) { + if (request3.responseTainting === "opaque" || response.body == null) { processBodyError(response.error); return; } const processBody = (bytes) => { - if (!bytesMatch(bytes, request2.integrity)) { + if (!bytesMatch(bytes, request3.integrity)) { processBodyError("integrity mismatch"); return; } @@ -29964,8 +29942,8 @@ var require_fetch2 = __commonJS({ if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { return Promise.resolve(makeAppropriateNetworkError(fetchParams)); } - const { request: request2 } = fetchParams; - const { protocol: scheme } = requestCurrentURL(request2); + const { request: request3 } = fetchParams; + const { protocol: scheme } = requestCurrentURL(request3); switch (scheme) { case "about:": { return Promise.resolve(makeNetworkError("about scheme is not 
supported")); @@ -29974,19 +29952,19 @@ var require_fetch2 = __commonJS({ if (!resolveObjectURL) { resolveObjectURL = require("buffer").resolveObjectURL; } - const blobURLEntry = requestCurrentURL(request2); + const blobURLEntry = requestCurrentURL(request3); if (blobURLEntry.search.length !== 0) { return Promise.resolve(makeNetworkError("NetworkError when attempting to fetch resource.")); } const blob = resolveObjectURL(blobURLEntry.toString()); - if (request2.method !== "GET" || !isBlobLike(blob)) { + if (request3.method !== "GET" || !isBlobLike(blob)) { return Promise.resolve(makeNetworkError("invalid method")); } const response = makeResponse(); const fullLength = blob.size; const serializedFullLength = isomorphicEncode(`${fullLength}`); const type = blob.type; - if (!request2.headersList.contains("range", true)) { + if (!request3.headersList.contains("range", true)) { const bodyWithType = extractBody(blob); response.statusText = "OK"; response.body = bodyWithType[0]; @@ -29994,7 +29972,7 @@ var require_fetch2 = __commonJS({ response.headersList.set("content-type", type, true); } else { response.rangeRequested = true; - const rangeHeader = request2.headersList.get("range", true); + const rangeHeader = request3.headersList.get("range", true); const rangeValue = simpleRangeHeaderValue(rangeHeader, true); if (rangeValue === "failure") { return Promise.resolve(makeNetworkError("failed to fetch the data URL")); @@ -30025,7 +30003,7 @@ var require_fetch2 = __commonJS({ return Promise.resolve(response); } case "data:": { - const currentURL = requestCurrentURL(request2); + const currentURL = requestCurrentURL(request3); const dataURLStruct = dataURLProcessor(currentURL); if (dataURLStruct === "failure") { return Promise.resolve(makeNetworkError("failed to fetch the data URL")); @@ -30135,41 +30113,41 @@ var require_fetch2 = __commonJS({ } } async function httpFetch(fetchParams) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; let actualResponse = null; const timingInfo = fetchParams.timingInfo; - if (request2.serviceWorkers === "all") { + if (request3.serviceWorkers === "all") { } if (response === null) { - if (request2.redirect === "follow") { - request2.serviceWorkers = "none"; + if (request3.redirect === "follow") { + request3.serviceWorkers = "none"; } actualResponse = response = await httpNetworkOrCacheFetch(fetchParams); - if (request2.responseTainting === "cors" && corsCheck(request2, response) === "failure") { + if (request3.responseTainting === "cors" && corsCheck(request3, response) === "failure") { return makeNetworkError("cors failure"); } - if (TAOCheck(request2, response) === "failure") { - request2.timingAllowFailed = true; + if (TAOCheck(request3, response) === "failure") { + request3.timingAllowFailed = true; } } - if ((request2.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( - request2.origin, - request2.client, - request2.destination, + if ((request3.responseTainting === "opaque" || response.type === "opaque") && crossOriginResourcePolicyCheck( + request3.origin, + request3.client, + request3.destination, actualResponse ) === "blocked") { return makeNetworkError("blocked"); } if (redirectStatusSet.has(actualResponse.status)) { - if (request2.redirect !== "manual") { + if (request3.redirect !== "manual") { fetchParams.controller.connection.destroy(void 0, false); } - if (request2.redirect === "error") { + if (request3.redirect === "error") { response = 
makeNetworkError("unexpected redirect"); - } else if (request2.redirect === "manual") { + } else if (request3.redirect === "manual") { response = actualResponse; - } else if (request2.redirect === "follow") { + } else if (request3.redirect === "follow") { response = await httpRedirectFetch(fetchParams, response); } else { assert(false); @@ -30179,13 +30157,13 @@ var require_fetch2 = __commonJS({ return response; } function httpRedirectFetch(fetchParams, response) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; const actualResponse = response.internalResponse ? response.internalResponse : response; let locationURL; try { locationURL = responseLocationURL( actualResponse, - requestCurrentURL(request2).hash + requestCurrentURL(request3).hash ); if (locationURL == null) { return response; @@ -30196,63 +30174,63 @@ var require_fetch2 = __commonJS({ if (!urlIsHttpHttpsScheme(locationURL)) { return Promise.resolve(makeNetworkError("URL scheme must be a HTTP(S) scheme")); } - if (request2.redirectCount === 20) { + if (request3.redirectCount === 20) { return Promise.resolve(makeNetworkError("redirect count exceeded")); } - request2.redirectCount += 1; - if (request2.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request2, locationURL)) { + request3.redirectCount += 1; + if (request3.mode === "cors" && (locationURL.username || locationURL.password) && !sameOrigin(request3, locationURL)) { return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')); } - if (request2.responseTainting === "cors" && (locationURL.username || locationURL.password)) { + if (request3.responseTainting === "cors" && (locationURL.username || locationURL.password)) { return Promise.resolve(makeNetworkError( 'URL cannot contain credentials for request mode "cors"' )); } - if (actualResponse.status !== 303 && request2.body != null && request2.body.source == null) { + if (actualResponse.status !== 303 && request3.body != null && request3.body.source == null) { return Promise.resolve(makeNetworkError()); } - if ([301, 302].includes(actualResponse.status) && request2.method === "POST" || actualResponse.status === 303 && !GET_OR_HEAD.includes(request2.method)) { - request2.method = "GET"; - request2.body = null; + if ([301, 302].includes(actualResponse.status) && request3.method === "POST" || actualResponse.status === 303 && !GET_OR_HEAD.includes(request3.method)) { + request3.method = "GET"; + request3.body = null; for (const headerName of requestBodyHeader) { - request2.headersList.delete(headerName); + request3.headersList.delete(headerName); } } - if (!sameOrigin(requestCurrentURL(request2), locationURL)) { - request2.headersList.delete("authorization", true); - request2.headersList.delete("proxy-authorization", true); - request2.headersList.delete("cookie", true); - request2.headersList.delete("host", true); + if (!sameOrigin(requestCurrentURL(request3), locationURL)) { + request3.headersList.delete("authorization", true); + request3.headersList.delete("proxy-authorization", true); + request3.headersList.delete("cookie", true); + request3.headersList.delete("host", true); } - if (request2.body != null) { - assert(request2.body.source != null); - request2.body = safelyExtractBody(request2.body.source)[0]; + if (request3.body != null) { + assert(request3.body.source != null); + request3.body = safelyExtractBody(request3.body.source)[0]; } const timingInfo = fetchParams.timingInfo; timingInfo.redirectEndTime = 
timingInfo.postRedirectStartTime = coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability); if (timingInfo.redirectStartTime === 0) { timingInfo.redirectStartTime = timingInfo.startTime; } - request2.urlList.push(locationURL); - setRequestReferrerPolicyOnRedirect(request2, actualResponse); + request3.urlList.push(locationURL); + setRequestReferrerPolicyOnRedirect(request3, actualResponse); return mainFetch(fetchParams, true); } async function httpNetworkOrCacheFetch(fetchParams, isAuthenticationFetch = false, isNewConnectionFetch = false) { - const request2 = fetchParams.request; + const request3 = fetchParams.request; let httpFetchParams = null; let httpRequest = null; let response = null; const httpCache = null; const revalidatingFlag = false; - if (request2.window === "no-window" && request2.redirect === "error") { + if (request3.window === "no-window" && request3.redirect === "error") { httpFetchParams = fetchParams; - httpRequest = request2; + httpRequest = request3; } else { - httpRequest = cloneRequest(request2); + httpRequest = cloneRequest(request3); httpFetchParams = { ...fetchParams }; httpFetchParams.request = httpRequest; } - const includeCredentials = request2.credentials === "include" || request2.credentials === "same-origin" && request2.responseTainting === "basic"; + const includeCredentials = request3.credentials === "include" || request3.credentials === "same-origin" && request3.responseTainting === "basic"; const contentLength = httpRequest.body ? httpRequest.body.length : null; let contentLengthHeaderValue = null; if (httpRequest.body == null && ["POST", "PUT"].includes(httpRequest.method)) { @@ -30329,7 +30307,7 @@ var require_fetch2 = __commonJS({ } response.requestIncludesCredentials = includeCredentials; if (response.status === 407) { - if (request2.window === "no-window") { + if (request3.window === "no-window") { return makeNetworkError(); } if (isCancelled(fetchParams)) { @@ -30341,7 +30319,7 @@ var require_fetch2 = __commonJS({ // response’s status is 421 response.status === 421 && // isNewConnectionFetch is false !isNewConnectionFetch && // request’s body is null, or request’s body is non-null and request’s body’s source is non-null - (request2.body == null || request2.body.source != null) + (request3.body == null || request3.body.source != null) ) { if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams); @@ -30371,21 +30349,21 @@ var require_fetch2 = __commonJS({ } } }; - const request2 = fetchParams.request; + const request3 = fetchParams.request; let response = null; const timingInfo = fetchParams.timingInfo; const httpCache = null; if (httpCache == null) { - request2.cache = "no-store"; + request3.cache = "no-store"; } const newConnection = forceNewConnection ? 
"yes" : "no"; - if (request2.mode === "websocket") { + if (request3.mode === "websocket") { } else { } let requestBody = null; - if (request2.body == null && fetchParams.processRequestEndOfBody) { + if (request3.body == null && fetchParams.processRequestEndOfBody) { queueMicrotask(() => fetchParams.processRequestEndOfBody()); - } else if (request2.body != null) { + } else if (request3.body != null) { const processBodyChunk = async function* (bytes) { if (isCancelled(fetchParams)) { return; @@ -30413,7 +30391,7 @@ var require_fetch2 = __commonJS({ }; requestBody = async function* () { try { - for await (const bytes of request2.body.stream) { + for await (const bytes of request3.body.stream) { yield* processBodyChunk(bytes); } processEndOfBody(); @@ -30427,8 +30405,8 @@ var require_fetch2 = __commonJS({ if (socket) { response = makeResponse({ status, statusText, headersList, socket }); } else { - const iterator2 = body[Symbol.asyncIterator](); - fetchParams.controller.next = () => iterator2.next(); + const iterator3 = body[Symbol.asyncIterator](); + fetchParams.controller.next = () => iterator3.next(); response = makeResponse({ status, statusText, headersList }); } } catch (err) { @@ -30521,17 +30499,17 @@ var require_fetch2 = __commonJS({ } return response; function dispatch({ body }) { - const url = requestCurrentURL(request2); + const url = requestCurrentURL(request3); const agent = fetchParams.controller.dispatcher; return new Promise((resolve, reject) => agent.dispatch( { path: url.pathname + url.search, origin: url.origin, - method: request2.method, - body: agent.isMockActive ? request2.body && (request2.body.source || request2.body.stream) : body, - headers: request2.headersList.entries, + method: request3.method, + body: agent.isMockActive ? request3.body && (request3.body.source || request3.body.stream) : body, + headers: request3.headersList.entries, maxRedirections: 0, - upgrade: request2.mode === "websocket" ? "websocket" : void 0 + upgrade: request3.mode === "websocket" ? 
"websocket" : void 0 }, { body: null, @@ -30569,8 +30547,8 @@ var require_fetch2 = __commonJS({ } this.body = new Readable({ read: resume }); const decoders = []; - const willFollow = location && request2.redirect === "follow" && redirectStatusSet.has(status); - if (request2.method !== "HEAD" && request2.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { + const willFollow = location && request3.redirect === "follow" && redirectStatusSet.has(status); + if (request3.method !== "HEAD" && request3.method !== "CONNECT" && !nullBodyStatus.includes(status) && !willFollow) { for (let i2 = 0; i2 < codings.length; ++i2) { const coding = codings[i2]; if (coding === "x-gzip" || coding === "gzip") { @@ -31536,29 +31514,28 @@ var require_cache2 = __commonJS({ } this.#relevantRequestResponseList = arguments[1]; } - async match(request2, options = {}) { + async match(request3, options = {}) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.match" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); - const p = this.#internalMatchAll(request2, options, 1); + const p = this.#internalMatchAll(request3, options, 1); if (p.length === 0) { return; } return p[0]; } - async matchAll(request2 = void 0, options = {}) { + async matchAll(request3 = void 0, options = {}) { webidl.brandCheck(this, _Cache); - if (request2 !== void 0) - request2 = webidl.converters.RequestInfo(request2); + if (request3 !== void 0) request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); - return this.#internalMatchAll(request2, options); + return this.#internalMatchAll(request3, options); } - async add(request2) { + async add(request3) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.add" }); - request2 = webidl.converters.RequestInfo(request2); - const requests = [request2]; + request3 = webidl.converters.RequestInfo(request3); + const requests = [request3]; const responseArrayPromise = this.addAll(requests); return await responseArrayPromise; } @@ -31567,19 +31544,19 @@ var require_cache2 = __commonJS({ webidl.argumentLengthCheck(arguments, 1, { header: "Cache.addAll" }); const responsePromises = []; const requestList = []; - for (let request2 of requests) { - if (request2 === void 0) { + for (let request3 of requests) { + if (request3 === void 0) { throw webidl.errors.conversionFailed({ prefix: "Cache.addAll", argument: "Argument 1", types: ["undefined is not allowed"] }); } - request2 = webidl.converters.RequestInfo(request2); - if (typeof request2 === "string") { + request3 = webidl.converters.RequestInfo(request3); + if (typeof request3 === "string") { continue; } - const r2 = request2[kState]; + const r2 = request3[kState]; if (!urlIsHttpHttpsScheme(r2.url) || r2.method !== "GET") { throw webidl.errors.exception({ header: "Cache.addAll", @@ -31588,8 +31565,8 @@ var require_cache2 = __commonJS({ } } const fetchControllers = []; - for (const request2 of requests) { - const r2 = new Request2(request2)[kState]; + for (const request3 of requests) { + const r2 = new Request2(request3)[kState]; if (!urlIsHttpHttpsScheme(r2.url)) { throw webidl.errors.exception({ header: "Cache.addAll", @@ -31666,16 +31643,16 @@ var require_cache2 = __commonJS({ }); return cacheJobPromise.promise; } - async put(request2, response) { + async put(request3, response) { 
webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 2, { header: "Cache.put" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); response = webidl.converters.Response(response); let innerRequest = null; - if (request2 instanceof Request2) { - innerRequest = request2[kState]; + if (request3 instanceof Request2) { + innerRequest = request3[kState]; } else { - innerRequest = new Request2(request2)[kState]; + innerRequest = new Request2(request3)[kState]; } if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== "GET") { throw webidl.errors.exception({ @@ -31746,20 +31723,20 @@ var require_cache2 = __commonJS({ }); return cacheJobPromise.promise; } - async delete(request2, options = {}) { + async delete(request3, options = {}) { webidl.brandCheck(this, _Cache); webidl.argumentLengthCheck(arguments, 1, { header: "Cache.delete" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); let r2 = null; - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return false; } } else { - assert(typeof request2 === "string"); - r2 = new Request2(request2)[kState]; + assert(typeof request3 === "string"); + r2 = new Request2(request3)[kState]; } const operations = []; const operation = { @@ -31791,25 +31768,24 @@ var require_cache2 = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions} options * @returns {Promise} */ - async keys(request2 = void 0, options = {}) { + async keys(request3 = void 0, options = {}) { webidl.brandCheck(this, _Cache); - if (request2 !== void 0) - request2 = webidl.converters.RequestInfo(request2); + if (request3 !== void 0) request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.CacheQueryOptions(options); let r2 = null; - if (request2 !== void 0) { - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 !== void 0) { + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r2 = new Request2(request2)[kState]; + } else if (typeof request3 === "string") { + r2 = new Request2(request3)[kState]; } } const promise = createDeferredPromise(); const requests = []; - if (request2 === void 0) { + if (request3 === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { requests.push(requestResponse[0]); } @@ -31821,12 +31797,12 @@ var require_cache2 = __commonJS({ } queueMicrotask(() => { const requestList = []; - for (const request3 of requests) { + for (const request4 of requests) { const requestObject = fromInnerRequest( - request3, + request4, new AbortController().signal, "immutable", - { settingsObject: request3.client } + { settingsObject: request4.client } ); requestList.push(requestObject); } @@ -31942,9 +31918,9 @@ var require_cache2 = __commonJS({ * @param {import('../../types/cache').CacheQueryOptions | undefined} options * @returns {boolean} */ - #requestMatchesCachedItem(requestQuery, request2, response = null, options) { + #requestMatchesCachedItem(requestQuery, request3, response = null, options) { const queryURL = new URL(requestQuery.url); - const cachedURL = new URL(request2.url); + const cachedURL = new URL(request3.url); if (options?.ignoreSearch) { 
cachedURL.search = ""; queryURL.search = ""; @@ -31960,7 +31936,7 @@ var require_cache2 = __commonJS({ if (fieldValue === "*") { return false; } - const requestValue = request2.headersList.get(fieldValue); + const requestValue = request3.headersList.get(fieldValue); const queryValue = requestQuery.headersList.get(fieldValue); if (requestValue !== queryValue) { return false; @@ -31968,20 +31944,20 @@ var require_cache2 = __commonJS({ } return true; } - #internalMatchAll(request2, options, maxResponses = Infinity) { + #internalMatchAll(request3, options, maxResponses = Infinity) { let r2 = null; - if (request2 !== void 0) { - if (request2 instanceof Request2) { - r2 = request2[kState]; + if (request3 !== void 0) { + if (request3 instanceof Request2) { + r2 = request3[kState]; if (r2.method !== "GET" && !options.ignoreMethod) { return []; } - } else if (typeof request2 === "string") { - r2 = new Request2(request2)[kState]; + } else if (typeof request3 === "string") { + r2 = new Request2(request3)[kState]; } } const responses = []; - if (request2 === void 0) { + if (request3 === void 0) { for (const requestResponse of this.#relevantRequestResponseList) { responses.push(requestResponse[1]); } @@ -32069,21 +32045,21 @@ var require_cachestorage2 = __commonJS({ webidl.illegalConstructor(); } } - async match(request2, options = {}) { + async match(request3, options = {}) { webidl.brandCheck(this, _CacheStorage); webidl.argumentLengthCheck(arguments, 1, { header: "CacheStorage.match" }); - request2 = webidl.converters.RequestInfo(request2); + request3 = webidl.converters.RequestInfo(request3); options = webidl.converters.MultiCacheQueryOptions(options); if (options.cacheName != null) { if (this.#caches.has(options.cacheName)) { const cacheList = this.#caches.get(options.cacheName); const cache2 = new Cache(kConstruct, cacheList); - return await cache2.match(request2, options); + return await cache2.match(request3, options); } } else { for (const cacheList of this.#caches.values()) { const cache2 = new Cache(kConstruct, cacheList); - const response = await cache2.match(request2, options); + const response = await cache2.match(request3, options); if (response !== void 0) { return response; } @@ -32282,7 +32258,7 @@ var require_util13 = __commonJS({ throw new Error("Invalid cookie max-age"); } } - function stringify2(cookie) { + function stringify(cookie) { if (cookie.name.length === 0) { return null; } @@ -32351,7 +32327,7 @@ var require_util13 = __commonJS({ validateCookiePath, validateCookieValue, toIMFDate, - stringify: stringify2, + stringify, getHeadersList }; } @@ -32502,7 +32478,7 @@ var require_cookies2 = __commonJS({ "node_modules/undici/lib/web/cookies/index.js"(exports2, module2) { "use strict"; var { parseSetCookie } = require_parse2(); - var { stringify: stringify2, getHeadersList } = require_util13(); + var { stringify, getHeadersList } = require_util13(); var { webidl } = require_webidl2(); var { Headers: Headers2 } = require_headers2(); function getCookies(headers) { @@ -32544,7 +32520,7 @@ var require_cookies2 = __commonJS({ webidl.argumentLengthCheck(arguments, 2, { header: "setCookie" }); webidl.brandCheck(headers, Headers2, { strict: false }); cookie = webidl.converters.Cookie(cookie); - const str = stringify2(cookie); + const str = stringify(cookie); if (str) { headers.append("Set-Cookie", str); } @@ -33083,15 +33059,15 @@ var require_connection2 = __commonJS({ var { Headers: Headers2 } = require_headers2(); var { getDecodeSplit } = require_util9(); var { kHeadersList } = 
require_symbols6(); - var crypto4; + var crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); } catch { } function establishWebSocketConnection(url, protocols, ws, onEstablish, options) { const requestURL = url; requestURL.protocol = url.protocol === "ws:" ? "http:" : "https:"; - const request2 = makeRequest({ + const request3 = makeRequest({ urlList: [requestURL], serviceWorkers: "none", referrer: "no-referrer", @@ -33102,17 +33078,17 @@ var require_connection2 = __commonJS({ }); if (options.headers) { const headersList = new Headers2(options.headers)[kHeadersList]; - request2.headersList = headersList; + request3.headersList = headersList; } - const keyValue = crypto4.randomBytes(16).toString("base64"); - request2.headersList.append("sec-websocket-key", keyValue); - request2.headersList.append("sec-websocket-version", "13"); + const keyValue = crypto.randomBytes(16).toString("base64"); + request3.headersList.append("sec-websocket-key", keyValue); + request3.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { - request2.headersList.append("sec-websocket-protocol", protocol); + request3.headersList.append("sec-websocket-protocol", protocol); } const permessageDeflate = ""; const controller = fetching({ - request: request2, + request: request3, useParallelQueue: true, dispatcher: options.dispatcher, processResponse(response) { @@ -33133,7 +33109,7 @@ var require_connection2 = __commonJS({ return; } const secWSAccept = response.headersList.get("Sec-WebSocket-Accept"); - const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64"); + const digest = crypto.createHash("sha1").update(keyValue + uid).digest("base64"); if (secWSAccept !== digest) { failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header."); return; @@ -33145,7 +33121,7 @@ var require_connection2 = __commonJS({ } const secProtocol = response.headersList.get("Sec-WebSocket-Protocol"); if (secProtocol !== null) { - const requestProtocols = getDecodeSplit("sec-websocket-protocol", request2.headersList); + const requestProtocols = getDecodeSplit("sec-websocket-protocol", request3.headersList); if (!requestProtocols.includes(secProtocol)) { failWebsocketConnection(ws, "Protocol was not set in the opening handshake."); return; @@ -33216,9 +33192,9 @@ var require_frame2 = __commonJS({ "node_modules/undici/lib/web/websocket/frame.js"(exports2, module2) { "use strict"; var { maxUnsigned16Bit } = require_constants10(); - var crypto4; + var crypto; try { - crypto4 = require("crypto"); + crypto = require("crypto"); } catch { } var WebsocketFrameSend = class { @@ -33227,7 +33203,7 @@ var require_frame2 = __commonJS({ */ constructor(data) { this.frameData = data; - this.maskKey = crypto4.randomBytes(4); + this.maskKey = crypto.randomBytes(4); } createFrame(opcode) { const bodyLength = this.frameData?.byteLength ?? 
0; @@ -33806,12 +33782,12 @@ var require_websocket2 = __commonJS({ */ #onConnectionEstablished(response) { this[kResponse] = response; - const parser3 = new ByteParser(this); - parser3.on("drain", function onParserDrain() { + const parser4 = new ByteParser(this); + parser4.on("drain", function onParserDrain() { this.ws[kResponse].socket.resume(); }); response.socket.ws = this; - this[kByteParser] = parser3; + this[kByteParser] = parser4; this[kReadyState] = states.OPEN; const extensions = response.headersList.get("sec-websocket-extensions"); if (extensions !== null) { @@ -33918,11 +33894,9 @@ var require_util15 = __commonJS({ return value.indexOf("\0") === -1; } function isASCIINumber(value) { - if (value.length === 0) - return false; + if (value.length === 0) return false; for (let i2 = 0; i2 < value.length; i2++) { - if (value.charCodeAt(i2) < 48 || value.charCodeAt(i2) > 57) - return false; + if (value.charCodeAt(i2) < 48 || value.charCodeAt(i2) > 57) return false; } return true; } @@ -34288,8 +34262,7 @@ var require_eventsource = __commonJS({ return this.#withCredentials; } #connect() { - if (this.#readyState === CLOSED) - return; + if (this.#readyState === CLOSED) return; this.#readyState = CONNECTING; const fetchParam = { request: this.#request @@ -34351,13 +34324,11 @@ var require_eventsource = __commonJS({ * @returns {Promise} */ async #reconnect() { - if (this.#readyState === CLOSED) - return; + if (this.#readyState === CLOSED) return; this.#readyState = CONNECTING; this.dispatchEvent(new Event("error")); await delay2(this.#settings.reconnectionTime); - if (this.#readyState !== CONNECTING) - return; + if (this.#readyState !== CONNECTING) return; if (this.#settings.lastEventId !== "") { this.#request.headersList.set("last-event-id", this.#settings.lastEventId, true); } @@ -34369,8 +34340,7 @@ var require_eventsource = __commonJS({ */ close() { webidl.brandCheck(this, _EventSource); - if (this.#readyState === CLOSED) - return; + if (this.#readyState === CLOSED) return; this.#readyState = CLOSED; clearTimeout(this.#settings.reconnectionTimer); this.#controller.abort(); @@ -34513,9 +34483,9 @@ var require_undici2 = __commonJS({ headerNameToString: util.headerNameToString }; function makeDispatcher(fn) { - return (url, opts, handler2) => { + return (url, opts, handler3) => { if (typeof opts === "function") { - handler2 = opts; + handler3 = opts; opts = null; } if (!url || typeof url !== "string" && typeof url !== "object" && !(url instanceof URL)) { @@ -34548,7 +34518,7 @@ var require_undici2 = __commonJS({ origin: url.origin, path: url.search ? `${url.pathname}${url.search}` : url.pathname, method: opts.method || (opts.body ? "PUT" : "GET") - }, handler2); + }, handler3); }; } module2.exports.setGlobalDispatcher = setGlobalDispatcher; @@ -34608,8 +34578,7 @@ var require_utils3 = __commonJS({ "node_modules/@actions/github/lib/internal/utils.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m2, k); if (!desc || ("get" in desc ? 
!m2.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { @@ -34618,8 +34587,7 @@ var require_utils3 = __commonJS({ } Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { @@ -34628,13 +34596,10 @@ var require_utils3 = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -34704,7 +34669,7 @@ var require_utils3 = __commonJS({ } }); -// node_modules/universal-user-agent/dist-web/index.js +// node_modules/@octokit/core/node_modules/universal-user-agent/dist-web/index.js function getUserAgent() { if (typeof navigator === "object" && "userAgent" in navigator) { return navigator.userAgent; @@ -34715,7 +34680,7 @@ function getUserAgent() { return ""; } var init_dist_web = __esm({ - "node_modules/universal-user-agent/dist-web/index.js"() { + "node_modules/@octokit/core/node_modules/universal-user-agent/dist-web/index.js"() { "use strict"; } }); @@ -34724,8 +34689,8 @@ var init_dist_web = __esm({ var require_register = __commonJS({ "node_modules/before-after-hook/lib/register.js"(exports2, module2) { "use strict"; - module2.exports = register; - function register(state, name, method, options) { + module2.exports = register2; + function register2(state, name, method, options) { if (typeof method !== "function") { throw new Error("method for before hook must be a function"); } @@ -34734,7 +34699,7 @@ var require_register = __commonJS({ } if (Array.isArray(name)) { return name.reverse().reduce(function(callback, name2) { - return register.bind(null, state, name2, callback, options); + return register2.bind(null, state, name2, callback, options); }, method)(); } return Promise.resolve().then(function() { @@ -34753,19 +34718,19 @@ var require_register = __commonJS({ var require_add = __commonJS({ "node_modules/before-after-hook/lib/add.js"(exports2, module2) { "use strict"; - module2.exports = addHook; - function addHook(state, kind, name, hook2) { - var orig = hook2; + module2.exports = addHook2; + function addHook2(state, kind, name, hook4) { + var orig = hook4; if (!state.registry[name]) { state.registry[name] = []; } if (kind === "before") { - hook2 = function(method, options) { + hook4 = function(method, options) { return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options)); }; } if (kind === "after") { - hook2 = function(method, options) { + hook4 = function(method, options) { var result; return Promise.resolve().then(method.bind(null, options)).then(function(result_) { result = result_; @@ -34776,14 +34741,14 @@ var require_add = __commonJS({ }; } if (kind === "error") { - hook2 = function(method, options) { + hook4 = function(method, options) { return Promise.resolve().then(method.bind(null, options)).catch(function(error) { return orig(error, options); }); }; } state.registry[name].push({ - hook: hook2, + hook: hook4, orig }); } @@ -34794,8 +34759,8 @@ var require_add 
= __commonJS({ var require_remove = __commonJS({ "node_modules/before-after-hook/lib/remove.js"(exports2, module2) { "use strict"; - module2.exports = removeHook; - function removeHook(state, name, method) { + module2.exports = removeHook2; + function removeHook2(state, name, method) { if (!state.registry[name]) { return; } @@ -34814,21 +34779,21 @@ var require_remove = __commonJS({ var require_before_after_hook = __commonJS({ "node_modules/before-after-hook/index.js"(exports2, module2) { "use strict"; - var register = require_register(); - var addHook = require_add(); - var removeHook = require_remove(); - var bind = Function.bind; - var bindable = bind.bind(bind); - function bindApi(hook2, state, name) { - var removeHookRef = bindable(removeHook, null).apply( + var register2 = require_register(); + var addHook2 = require_add(); + var removeHook2 = require_remove(); + var bind2 = Function.bind; + var bindable2 = bind2.bind(bind2); + function bindApi2(hook4, state, name) { + var removeHookRef = bindable2(removeHook2, null).apply( null, name ? [state, name] : [state] ); - hook2.api = { remove: removeHookRef }; - hook2.remove = removeHookRef; + hook4.api = { remove: removeHookRef }; + hook4.remove = removeHookRef; ["before", "error", "after", "wrap"].forEach(function(kind) { var args = name ? [state, kind, name] : [state, kind]; - hook2[kind] = hook2.api[kind] = bindable(addHook, null).apply(null, args); + hook4[kind] = hook4.api[kind] = bindable2(addHook2, null).apply(null, args); }); } function HookSingular() { @@ -34836,17 +34801,17 @@ var require_before_after_hook = __commonJS({ var singularHookState = { registry: {} }; - var singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); + var singularHook = register2.bind(null, singularHookState, singularHookName); + bindApi2(singularHook, singularHookState, singularHookName); return singularHook; } function HookCollection() { var state = { registry: {} }; - var hook2 = register.bind(null, state); - bindApi(hook2, state); - return hook2; + var hook4 = register2.bind(null, state); + bindApi2(hook4, state); + return hook4; } var collectionHookDeprecationMessageDisplayed = false; function Hook() { @@ -34867,19 +34832,19 @@ var require_before_after_hook = __commonJS({ } }); -// node_modules/@octokit/endpoint/dist-src/version.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/version.js var VERSION; var init_version = __esm({ - "node_modules/@octokit/endpoint/dist-src/version.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/version.js"() { "use strict"; VERSION = "9.0.5"; } }); -// node_modules/@octokit/endpoint/dist-src/defaults.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/defaults.js var userAgent, DEFAULTS; var init_defaults = __esm({ - "node_modules/@octokit/endpoint/dist-src/defaults.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/defaults.js"() { "use strict"; init_dist_web(); init_version(); @@ -34898,7 +34863,7 @@ var init_defaults = __esm({ } }); -// node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js function lowercaseKeys(object) { if (!object) { return {}; @@ -34909,12 +34874,12 @@ function 
lowercaseKeys(object) { }, {}); } var init_lowercase_keys = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/lowercase-keys.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/util/is-plain-object.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/is-plain-object.js function isPlainObject(value) { if (typeof value !== "object" || value === null) return false; @@ -34927,12 +34892,12 @@ function isPlainObject(value) { return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } var init_is_plain_object = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/is-plain-object.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/is-plain-object.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/util/merge-deep.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js function mergeDeep(defaults2, options) { const result = Object.assign({}, defaults2); Object.keys(options).forEach((key) => { @@ -34948,13 +34913,13 @@ function mergeDeep(defaults2, options) { return result; } var init_merge_deep = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/merge-deep.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/merge-deep.js"() { "use strict"; init_is_plain_object(); } }); -// node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js function removeUndefinedProperties(obj) { for (const key in obj) { if (obj[key] === void 0) { @@ -34964,12 +34929,12 @@ function removeUndefinedProperties(obj) { return obj; } var init_remove_undefined_properties = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/remove-undefined-properties.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/merge.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/merge.js function merge(defaults2, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); @@ -34992,7 +34957,7 @@ function merge(defaults2, route, options) { return mergedOptions; } var init_merge = __esm({ - "node_modules/@octokit/endpoint/dist-src/merge.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/merge.js"() { "use strict"; init_lowercase_keys(); init_merge_deep(); @@ -35000,7 +34965,7 @@ var init_merge = __esm({ } }); -// node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? 
"&" : "?"; const names = Object.keys(parameters); @@ -35015,12 +34980,12 @@ function addQueryParameters(url, parameters) { }).join("&"); } var init_add_query_parameters = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/add-query-parameters.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js function removeNonChars(variableName) { return variableName.replace(/^\W+|\W+$/g, "").split(/,/); } @@ -35033,13 +34998,13 @@ function extractUrlVariableNames(url) { } var urlVariableRegex; var init_extract_url_variable_names = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/extract-url-variable-names.js"() { "use strict"; urlVariableRegex = /\{[^}]+\}/g; } }); -// node_modules/@octokit/endpoint/dist-src/util/omit.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/omit.js function omit(object, keysToOmit) { const result = { __proto__: null }; for (const key of Object.keys(object)) { @@ -35050,12 +35015,12 @@ function omit(object, keysToOmit) { return result; } var init_omit = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/omit.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/omit.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/util/url-template.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/url-template.js function encodeReserved(str) { return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { if (!/%[0-9A-Fa-f]/.test(part)) { @@ -35187,12 +35152,12 @@ function expand(template, context2) { } } var init_url_template = __esm({ - "node_modules/@octokit/endpoint/dist-src/util/url-template.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/util/url-template.js"() { "use strict"; } }); -// node_modules/@octokit/endpoint/dist-src/parse.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/parse.js function parse(options) { let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); @@ -35257,7 +35222,7 @@ function parse(options) { ); } var init_parse = __esm({ - "node_modules/@octokit/endpoint/dist-src/parse.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/parse.js"() { "use strict"; init_add_query_parameters(); init_extract_url_variable_names(); @@ -35266,31 +35231,31 @@ var init_parse = __esm({ } }); -// node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js function endpointWithDefaults(defaults2, route, options) { return parse(merge(defaults2, route, options)); } var init_endpoint_with_defaults = __esm({ - "node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js"() { + 
"node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/endpoint-with-defaults.js"() { "use strict"; init_merge(); init_parse(); } }); -// node_modules/@octokit/endpoint/dist-src/with-defaults.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/with-defaults.js function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS3 = merge(oldDefaults, newDefaults); - const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS3); - return Object.assign(endpoint2, { - DEFAULTS: DEFAULTS3, - defaults: withDefaults.bind(null, DEFAULTS3), - merge: merge.bind(null, DEFAULTS3), + const DEFAULTS4 = merge(oldDefaults, newDefaults); + const endpoint3 = endpointWithDefaults.bind(null, DEFAULTS4); + return Object.assign(endpoint3, { + DEFAULTS: DEFAULTS4, + defaults: withDefaults.bind(null, DEFAULTS4), + merge: merge.bind(null, DEFAULTS4), parse }); } var init_with_defaults = __esm({ - "node_modules/@octokit/endpoint/dist-src/with-defaults.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/with-defaults.js"() { "use strict"; init_endpoint_with_defaults(); init_merge(); @@ -35298,10 +35263,10 @@ var init_with_defaults = __esm({ } }); -// node_modules/@octokit/endpoint/dist-src/index.js +// node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/index.js var endpoint; var init_dist_src = __esm({ - "node_modules/@octokit/endpoint/dist-src/index.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/node_modules/@octokit/endpoint/dist-src/index.js"() { "use strict"; init_with_defaults(); init_defaults(); @@ -35309,16 +35274,16 @@ var init_dist_src = __esm({ } }); -// node_modules/@octokit/request/dist-src/version.js +// node_modules/@octokit/core/node_modules/@octokit/request/dist-src/version.js var VERSION2; var init_version2 = __esm({ - "node_modules/@octokit/request/dist-src/version.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/version.js"() { "use strict"; VERSION2 = "8.3.1"; } }); -// node_modules/@octokit/request/dist-src/is-plain-object.js +// node_modules/@octokit/core/node_modules/@octokit/request/dist-src/is-plain-object.js function isPlainObject2(value) { if (typeof value !== "object" || value === null) return false; @@ -35331,7 +35296,7 @@ function isPlainObject2(value) { return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } var init_is_plain_object2 = __esm({ - "node_modules/@octokit/request/dist-src/is-plain-object.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/is-plain-object.js"() { "use strict"; } }); @@ -35359,8 +35324,7 @@ var require_wrappy = __commonJS({ "use strict"; module2.exports = wrappy; function wrappy(fn, cb) { - if (fn && cb) - return wrappy(fn)(cb); + if (fn && cb) return wrappy(fn)(cb); if (typeof fn !== "function") throw new TypeError("need wrapper function"); Object.keys(fn).forEach(function(k) { @@ -35408,8 +35372,7 @@ var require_once = __commonJS({ }); function once2(fn) { var f3 = function() { - if (f3.called) - return f3.value; + if (f3.called) return f3.value; f3.called = true; return f3.value = fn.apply(this, arguments); }; @@ -35492,17 +35455,17 @@ var init_dist_src2 = __esm({ } }); -// node_modules/@octokit/request/dist-src/get-buffer-response.js +// 
node_modules/@octokit/core/node_modules/@octokit/request/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } var init_get_buffer_response = __esm({ - "node_modules/@octokit/request/dist-src/get-buffer-response.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/get-buffer-response.js"() { "use strict"; } }); -// node_modules/@octokit/request/dist-src/fetch-wrapper.js +// node_modules/@octokit/core/node_modules/@octokit/request/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; const parseSuccessResponseBody = requestOptions.request?.parseSuccessResponseBody !== false; @@ -35637,7 +35600,7 @@ function toErrorMessage(data) { return `Unknown error: ${JSON.stringify(data)}`; } var init_fetch_wrapper = __esm({ - "node_modules/@octokit/request/dist-src/fetch-wrapper.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/fetch-wrapper.js"() { "use strict"; init_is_plain_object2(); init_dist_src2(); @@ -35645,41 +35608,41 @@ var init_fetch_wrapper = __esm({ } }); -// node_modules/@octokit/request/dist-src/with-defaults.js +// node_modules/@octokit/core/node_modules/@octokit/request/dist-src/with-defaults.js function withDefaults2(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); + const endpoint3 = oldEndpoint.defaults(newDefaults); const newApi = function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); + const endpointOptions = endpoint3.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); + return fetchWrapper(endpoint3.parse(endpointOptions)); } - const request2 = (route2, parameters2) => { + const request3 = (route2, parameters2) => { return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) + endpoint3.parse(endpoint3.merge(route2, parameters2)) ); }; - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults2.bind(null, endpoint2) + Object.assign(request3, { + endpoint: endpoint3, + defaults: withDefaults2.bind(null, endpoint3) }); - return endpointOptions.request.hook(request2, endpointOptions); + return endpointOptions.request.hook(request3, endpointOptions); }; return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults2.bind(null, endpoint2) + endpoint: endpoint3, + defaults: withDefaults2.bind(null, endpoint3) }); } var init_with_defaults2 = __esm({ - "node_modules/@octokit/request/dist-src/with-defaults.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/with-defaults.js"() { "use strict"; init_fetch_wrapper(); } }); -// node_modules/@octokit/request/dist-src/index.js +// node_modules/@octokit/core/node_modules/@octokit/request/dist-src/index.js var request; var init_dist_src3 = __esm({ - "node_modules/@octokit/request/dist-src/index.js"() { + "node_modules/@octokit/core/node_modules/@octokit/request/dist-src/index.js"() { "use strict"; init_dist_src(); init_dist_web(); @@ -35698,7 +35661,7 @@ function _buildMessageForResponseErrors(data) { return `Request failed due to following response errors: ` + data.errors.map((e2) => ` - ${e2.message}`).join("\n"); } -function graphql(request2, query, options) { +function graphql(request22, query, options) { if (options) { if (typeof query === "string" && "query" in options) { return 
Promise.reject( @@ -35729,11 +35692,11 @@ function graphql(request2, query, options) { result.variables[key] = parsedOptions[key]; return result; }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + const baseUrl = parsedOptions.baseUrl || request22.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } - return request2(requestOptions).then((response) => { + return request22(requestOptions).then((response) => { if (response.data.errors) { const headers = {}; for (const key of Object.keys(response.headers)) { @@ -35748,8 +35711,8 @@ function graphql(request2, query, options) { return response.data.data; }); } -function withDefaults3(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); +function withDefaults3(request22, newDefaults) { + const newRequest = request22.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; @@ -35772,9 +35735,9 @@ var init_dist_web3 = __esm({ init_dist_web(); VERSION3 = "7.1.0"; GraphqlResponseError = class extends Error { - constructor(request2, headers, response) { + constructor(request22, headers, response) { super(_buildMessageForResponseErrors(response)); - this.request = request2; + this.request = request22; this.headers = headers; this.response = response; this.name = "GraphqlResponseError"; @@ -35842,13 +35805,13 @@ var init_with_authorization_prefix = __esm({ }); // node_modules/@octokit/auth-token/dist-src/hook.js -async function hook(token, request2, route, parameters) { - const endpoint2 = request2.endpoint.merge( +async function hook(token, request3, route, parameters) { + const endpoint3 = request3.endpoint.merge( route, parameters ); - endpoint2.headers.authorization = withAuthorizationPrefix(token); - return request2(endpoint2); + endpoint3.headers.authorization = withAuthorizationPrefix(token); + return request3(endpoint3); } var init_hook = __esm({ "node_modules/@octokit/auth-token/dist-src/hook.js"() { @@ -35858,10 +35821,6 @@ var init_hook = __esm({ }); // node_modules/@octokit/auth-token/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - createTokenAuth: () => createTokenAuth -}); var createTokenAuth; var init_dist_src4 = __esm({ "node_modules/@octokit/auth-token/dist-src/index.js"() { @@ -35952,13 +35911,13 @@ var init_dist_web4 = __esm({ return NewOctokit; } constructor(options = {}) { - const hook2 = new import_before_after_hook.Collection(); + const hook4 = new import_before_after_hook.Collection(); const requestDefaults = { baseUrl: request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { // @ts-ignore internal usage only, no need to type - hook: hook2.bind(null, "request") + hook: hook4.bind(null, "request") }), mediaType: { previews: [], @@ -35986,20 +35945,20 @@ var init_dist_web4 = __esm({ }, options.log ); - this.hook = hook2; + this.hook = hook4; if (!options.authStrategy) { if (!options.auth) { this.auth = async () => ({ type: "unauthenticated" }); } else { - const auth2 = createTokenAuth(options.auth); - hook2.wrap("request", auth2.hook); - this.auth = auth2; + const auth4 = createTokenAuth(options.auth); + hook4.wrap("request", auth4.hook); + this.auth = auth4; } } else { const { authStrategy, ...otherOptions } = options; - const auth2 = authStrategy( + const auth4 = authStrategy( Object.assign( { request: this.request, @@ -36015,8 +35974,8 @@ var init_dist_web4 = 
__esm({ options.auth ) ); - hook2.wrap("request", auth2.hook); - this.auth = auth2; + hook4.wrap("request", auth4.hook); + this.auth = auth4; } const classConstructor = this.constructor; for (let i2 = 0; i2 < classConstructor.plugins.length; ++i2) { @@ -38089,8 +38048,8 @@ var init_endpoints_to_methods = __esm({ init_endpoints(); endpointMethodsMap = /* @__PURE__ */ new Map(); for (const [scope, endpoints] of Object.entries(endpoints_default)) { - for (const [methodName, endpoint2] of Object.entries(endpoints)) { - const [route, defaults2, decorations] = endpoint2; + for (const [methodName, endpoint3] of Object.entries(endpoints)) { + const [route, defaults2, decorations] = endpoint3; const [method, url] = route.split(/ /); const endpointDefaults = Object.assign( { @@ -38164,8 +38123,8 @@ var init_endpoints_to_methods = __esm({ }); // node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js -var dist_src_exports2 = {}; -__export(dist_src_exports2, { +var dist_src_exports = {}; +__export(dist_src_exports, { legacyRestEndpointMethods: () => legacyRestEndpointMethods, restEndpointMethods: () => restEndpointMethods }); @@ -38274,8 +38233,8 @@ function paginate(octokit, route, parameters, mapFn) { mapFn ); } -function gather(octokit, results, iterator2, mapFn) { - return iterator2.next().then((result) => { +function gather(octokit, results, iterator22, mapFn) { + return iterator22.next().then((result) => { if (result.done) { return results; } @@ -38289,7 +38248,7 @@ function gather(octokit, results, iterator2, mapFn) { if (earlyExit) { return results; } - return gather(octokit, results, iterator2, mapFn); + return gather(octokit, results, iterator22, mapFn); }); } function isPaginatingEndpoint(arg) { @@ -38559,8 +38518,7 @@ var require_utils4 = __commonJS({ "node_modules/@actions/github/lib/utils.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m2, k); if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { @@ -38569,8 +38527,7 @@ var require_utils4 = __commonJS({ } Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { @@ -38579,13 +38536,10 @@ var require_utils4 = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -38595,7 +38549,7 @@ var require_utils4 = __commonJS({ var Context = __importStar(require_context()); var Utils = __importStar(require_utils3()); var core_1 = (init_dist_web4(), __toCommonJS(dist_web_exports)); - var plugin_rest_endpoint_methods_1 = (init_dist_src5(), __toCommonJS(dist_src_exports2)); + var plugin_rest_endpoint_methods_1 = (init_dist_src5(), __toCommonJS(dist_src_exports)); var plugin_paginate_rest_1 = (init_dist_web5(), __toCommonJS(dist_web_exports2)); exports2.context = new Context.Context(); var baseUrl = Utils.getApiBaseUrl(); @@ -38609,9 +38563,9 @@ var require_utils4 = __commonJS({ exports2.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports2.defaults); function getOctokitOptions(token, options) { const opts = Object.assign({}, options || {}); - const auth2 = Utils.getAuthString(token, opts); - if (auth2) { - opts.auth = auth2; + const auth4 = Utils.getAuthString(token, opts); + if (auth4) { + opts.auth = auth4; } return opts; } @@ -38624,8 +38578,7 @@ var require_github = __commonJS({ "node_modules/@actions/github/lib/github.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; var desc = Object.getOwnPropertyDescriptor(m2, k); if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { @@ -38634,8 +38587,7 @@ var require_github = __commonJS({ } Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { @@ -38644,13 +38596,10 @@ var require_github = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -38705,14 +38654,16 @@ var require_command = __commonJS({ "node_modules/@actions/core/lib/command.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { - return m2[k]; - } }); + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m2, k); + if (!desc || ("get" in desc ? 
!m2.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m2[k]; + } }; + } + Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { @@ -38721,13 +38672,10 @@ var require_command = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -38779,359 +38727,11 @@ var require_command = __commonJS({ } }; function escapeData(s2) { - return utils_1.toCommandValue(s2).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); + return (0, utils_1.toCommandValue)(s2).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); } function escapeProperty(s2) { - return utils_1.toCommandValue(s2).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A").replace(/:/g, "%3A").replace(/,/g, "%2C"); - } - } -}); - -// node_modules/uuid/dist/esm-node/rng.js -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - import_crypto.default.randomFillSync(rnds8Pool); - poolPtr = 0; - } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} -var import_crypto, rnds8Pool, poolPtr; -var init_rng = __esm({ - "node_modules/uuid/dist/esm-node/rng.js"() { - "use strict"; - import_crypto = __toESM(require("crypto")); - rnds8Pool = new Uint8Array(256); - poolPtr = rnds8Pool.length; - } -}); - -// node_modules/uuid/dist/esm-node/regex.js -var regex_default; -var init_regex = __esm({ - "node_modules/uuid/dist/esm-node/regex.js"() { - "use strict"; - regex_default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; - } -}); - -// node_modules/uuid/dist/esm-node/validate.js -function validate(uuid) { - return typeof uuid === "string" && regex_default.test(uuid); -} -var validate_default; -var init_validate = __esm({ - "node_modules/uuid/dist/esm-node/validate.js"() { - "use strict"; - init_regex(); - validate_default = validate; - } -}); - -// node_modules/uuid/dist/esm-node/stringify.js -function stringify(arr, offset = 0) { - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); - if (!validate_default(uuid)) { - throw TypeError("Stringified UUID is invalid"); - } - return uuid; -} -var byteToHex, stringify_default; -var init_stringify = __esm({ - "node_modules/uuid/dist/esm-node/stringify.js"() { - "use strict"; - init_validate(); - byteToHex = []; - for (let i2 = 0; i2 < 256; ++i2) { - byteToHex.push((i2 + 256).toString(16).substr(1)); - } - stringify_default = stringify; - } 
-}); - -// node_modules/uuid/dist/esm-node/v1.js -function v1(options, buf, offset) { - let i2 = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== void 0 ? options.clockseq : _clockseq; - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || rng)(); - if (node == null) { - node = _nodeId = [seedBytes[0] | 1, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - if (clockseq == null) { - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 16383; - } - } - let msecs = options.msecs !== void 0 ? options.msecs : Date.now(); - let nsecs = options.nsecs !== void 0 ? options.nsecs : _lastNSecs + 1; - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 1e4; - if (dt < 0 && options.clockseq === void 0) { - clockseq = clockseq + 1 & 16383; - } - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === void 0) { - nsecs = 0; - } - if (nsecs >= 1e4) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - msecs += 122192928e5; - const tl = ((msecs & 268435455) * 1e4 + nsecs) % 4294967296; - b[i2++] = tl >>> 24 & 255; - b[i2++] = tl >>> 16 & 255; - b[i2++] = tl >>> 8 & 255; - b[i2++] = tl & 255; - const tmh = msecs / 4294967296 * 1e4 & 268435455; - b[i2++] = tmh >>> 8 & 255; - b[i2++] = tmh & 255; - b[i2++] = tmh >>> 24 & 15 | 16; - b[i2++] = tmh >>> 16 & 255; - b[i2++] = clockseq >>> 8 | 128; - b[i2++] = clockseq & 255; - for (let n = 0; n < 6; ++n) { - b[i2 + n] = node[n]; - } - return buf || stringify_default(b); -} -var _nodeId, _clockseq, _lastMSecs, _lastNSecs, v1_default; -var init_v1 = __esm({ - "node_modules/uuid/dist/esm-node/v1.js"() { - "use strict"; - init_rng(); - init_stringify(); - _lastMSecs = 0; - _lastNSecs = 0; - v1_default = v1; - } -}); - -// node_modules/uuid/dist/esm-node/parse.js -function parse2(uuid) { - if (!validate_default(uuid)) { - throw TypeError("Invalid UUID"); - } - let v; - const arr = new Uint8Array(16); - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 255; - arr[2] = v >>> 8 & 255; - arr[3] = v & 255; - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 255; - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 255; - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 255; - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 1099511627776 & 255; - arr[11] = v / 4294967296 & 255; - arr[12] = v >>> 24 & 255; - arr[13] = v >>> 16 & 255; - arr[14] = v >>> 8 & 255; - arr[15] = v & 255; - return arr; -} -var parse_default; -var init_parse2 = __esm({ - "node_modules/uuid/dist/esm-node/parse.js"() { - "use strict"; - init_validate(); - parse_default = parse2; - } -}); - -// node_modules/uuid/dist/esm-node/v35.js -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); - const bytes = []; - for (let i2 = 0; i2 < str.length; ++i2) { - bytes.push(str.charCodeAt(i2)); - } - return bytes; -} -function v35_default(name, version2, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === "string") { - value = stringToBytes(value); - } - if (typeof namespace === "string") { - namespace = parse_default(namespace); - } - if (namespace.length !== 16) { - throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)"); - } - let bytes = new Uint8Array(16 + 
value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 15 | version2; - bytes[8] = bytes[8] & 63 | 128; - if (buf) { - offset = offset || 0; - for (let i2 = 0; i2 < 16; ++i2) { - buf[offset + i2] = bytes[i2]; - } - return buf; - } - return stringify_default(bytes); - } - try { - generateUUID.name = name; - } catch (err) { - } - generateUUID.DNS = DNS; - generateUUID.URL = URL2; - return generateUUID; -} -var DNS, URL2; -var init_v35 = __esm({ - "node_modules/uuid/dist/esm-node/v35.js"() { - "use strict"; - init_stringify(); - init_parse2(); - DNS = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"; - URL2 = "6ba7b811-9dad-11d1-80b4-00c04fd430c8"; - } -}); - -// node_modules/uuid/dist/esm-node/md5.js -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === "string") { - bytes = Buffer.from(bytes, "utf8"); - } - return import_crypto2.default.createHash("md5").update(bytes).digest(); -} -var import_crypto2, md5_default; -var init_md5 = __esm({ - "node_modules/uuid/dist/esm-node/md5.js"() { - "use strict"; - import_crypto2 = __toESM(require("crypto")); - md5_default = md5; - } -}); - -// node_modules/uuid/dist/esm-node/v3.js -var v3, v3_default; -var init_v3 = __esm({ - "node_modules/uuid/dist/esm-node/v3.js"() { - "use strict"; - init_v35(); - init_md5(); - v3 = v35_default("v3", 48, md5_default); - v3_default = v3; - } -}); - -// node_modules/uuid/dist/esm-node/v4.js -function v4(options, buf, offset) { - options = options || {}; - const rnds = options.random || (options.rng || rng)(); - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - for (let i2 = 0; i2 < 16; ++i2) { - buf[offset + i2] = rnds[i2]; + return (0, utils_1.toCommandValue)(s2).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A").replace(/:/g, "%3A").replace(/,/g, "%2C"); } - return buf; - } - return stringify_default(rnds); -} -var v4_default; -var init_v4 = __esm({ - "node_modules/uuid/dist/esm-node/v4.js"() { - "use strict"; - init_rng(); - init_stringify(); - v4_default = v4; - } -}); - -// node_modules/uuid/dist/esm-node/sha1.js -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === "string") { - bytes = Buffer.from(bytes, "utf8"); - } - return import_crypto3.default.createHash("sha1").update(bytes).digest(); -} -var import_crypto3, sha1_default; -var init_sha1 = __esm({ - "node_modules/uuid/dist/esm-node/sha1.js"() { - "use strict"; - import_crypto3 = __toESM(require("crypto")); - sha1_default = sha1; - } -}); - -// node_modules/uuid/dist/esm-node/v5.js -var v5, v5_default; -var init_v5 = __esm({ - "node_modules/uuid/dist/esm-node/v5.js"() { - "use strict"; - init_v35(); - init_sha1(); - v5 = v35_default("v5", 80, sha1_default); - v5_default = v5; - } -}); - -// node_modules/uuid/dist/esm-node/nil.js -var nil_default; -var init_nil = __esm({ - "node_modules/uuid/dist/esm-node/nil.js"() { - "use strict"; - nil_default = "00000000-0000-0000-0000-000000000000"; - } -}); - -// node_modules/uuid/dist/esm-node/version.js -function version(uuid) { - if (!validate_default(uuid)) { - throw TypeError("Invalid UUID"); - } - return parseInt(uuid.substr(14, 1), 16); -} -var version_default; -var init_version4 = __esm({ - "node_modules/uuid/dist/esm-node/version.js"() { - "use strict"; - init_validate(); - version_default = version; - } -}); - -// node_modules/uuid/dist/esm-node/index.js -var 
esm_node_exports = {}; -__export(esm_node_exports, { - NIL: () => nil_default, - parse: () => parse_default, - stringify: () => stringify_default, - v1: () => v1_default, - v3: () => v3_default, - v4: () => v4_default, - v5: () => v5_default, - validate: () => validate_default, - version: () => version_default -}); -var init_esm_node = __esm({ - "node_modules/uuid/dist/esm-node/index.js"() { - "use strict"; - init_v1(); - init_v3(); - init_v4(); - init_v5(); - init_nil(); - init_version4(); - init_validate(); - init_stringify(); - init_parse2(); } }); @@ -39140,14 +38740,16 @@ var require_file_command = __commonJS({ "node_modules/@actions/core/lib/file-command.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { - return m2[k]; - } }); + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m2, k); + if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m2[k]; + } }; + } + Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { @@ -39156,22 +38758,19 @@ var require_file_command = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; + var crypto = __importStar(require("crypto")); var fs2 = __importStar(require("fs")); var os2 = __importStar(require("os")); - var uuid_1 = (init_esm_node(), __toCommonJS(esm_node_exports)); var utils_1 = require_utils5(); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -39181,14 +38780,14 @@ var require_file_command = __commonJS({ if (!fs2.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs2.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os2.EOL}`, { + fs2.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os2.EOL}`, { encoding: "utf8" }); } exports2.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - const convertedValue = utils_1.toCommandValue(value); + const delimiter = `ghadelimiter_${crypto.randomUUID()}`; + const convertedValue = (0, utils_1.toCommandValue)(value); if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); } @@ -39389,9 +38988,9 @@ var require_oidc_utils = __commonJS({ const encodedAudience = encodeURIComponent(audience); id_token_url = `${id_token_url}&audience=${encodedAudience}`; } - core_1.debug(`ID token url is ${id_token_url}`); + (0, core_1.debug)(`ID token url is ${id_token_url}`); const id_token = yield 
_OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); + (0, core_1.setSecret)(id_token); return id_token; } catch (error) { throw new Error(`Error message: ${error.message}`); @@ -39702,14 +39301,16 @@ var require_path_utils = __commonJS({ "node_modules/@actions/core/lib/path-utils.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { - return m2[k]; - } }); + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m2, k); + if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m2[k]; + } }; + } + Object.defineProperty(o, k2, desc); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { @@ -39718,13 +39319,10 @@ var require_path_utils = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -39747,19 +39345,17 @@ var require_path_utils = __commonJS({ } }); -// node_modules/@actions/core/lib/core.js -var require_core = __commonJS({ - "node_modules/@actions/core/lib/core.js"(exports2) { +// node_modules/@actions/io/lib/io-util.js +var require_io_util = __commonJS({ + "node_modules/@actions/io/lib/io-util.js"(exports2) { "use strict"; var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m2[k]; } }); } : function(o, m2, k, k2) { - if (k2 === void 0) - k2 = k; + if (k2 === void 0) k2 = k; o[k2] = m2[k]; }); var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { @@ -39768,13 +39364,10 @@ var require_core = __commonJS({ o["default"] = v; }); var __importStar = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) - return mod; + if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) - if (k !== "default" && Object.hasOwnProperty.call(mod, k)) - __createBinding(result, mod, k); + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } __setModuleDefault(result, mod); return result; @@ -39806,5626 +39399,6206 @@ var require_core = __commonJS({ step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; + var _a2; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getIDToken = exports2.getState = exports2.saveState = exports2.group = exports2.endGroup = exports2.startGroup = exports2.info = exports2.notice = exports2.warning = exports2.error = exports2.debug = exports2.isDebug = exports2.setFailed = exports2.setCommandEcho = exports2.setOutput = exports2.getBooleanInput = exports2.getMultilineInput = exports2.getInput = exports2.addPath = exports2.setSecret = exports2.exportVariable = exports2.ExitCode = void 0; - var command_1 = require_command(); - var file_command_1 = require_file_command(); - var utils_1 = require_utils5(); - var os2 = __importStar(require("os")); + exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; + var fs2 = __importStar(require("fs")); var path2 = __importStar(require("path")); - var oidc_utils_1 = require_oidc_utils(); - var ExitCode; - (function(ExitCode2) { - ExitCode2[ExitCode2["Success"] = 0] = "Success"; - ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; - })(ExitCode = exports2.ExitCode || (exports2.ExitCode = {})); - function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env["GITHUB_ENV"] || ""; - if (filePath) { - return file_command_1.issueFileCommand("ENV", file_command_1.prepareKeyValueMessage(name, val)); - } - command_1.issueCommand("set-env", { name }, convertedVal); - } - exports2.exportVariable = exportVariable; - function setSecret(secret) { - command_1.issueCommand("add-mask", {}, secret); + _a2 = fs2.promises, exports2.chmod = _a2.chmod, exports2.copyFile = _a2.copyFile, exports2.lstat = _a2.lstat, exports2.mkdir = _a2.mkdir, exports2.open = _a2.open, exports2.readdir = _a2.readdir, exports2.readlink = _a2.readlink, exports2.rename = _a2.rename, exports2.rm = _a2.rm, exports2.rmdir = _a2.rmdir, exports2.stat = _a2.stat, exports2.symlink = _a2.symlink, exports2.unlink = _a2.unlink; + exports2.IS_WINDOWS = process.platform === "win32"; + exports2.UV_FS_O_EXLOCK = 268435456; + exports2.READONLY = fs2.constants.O_RDONLY; + function exists2(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports2.stat(fsPath); + } catch (err) { + if (err.code === "ENOENT") { + return false; + } + throw err; + } + return true; + }); } - exports2.setSecret = setSecret; - function addPath(inputPath) { - const filePath = process.env["GITHUB_PATH"] || ""; - if 
(filePath) { - file_command_1.issueFileCommand("PATH", inputPath); - } else { - command_1.issueCommand("add-path", {}, inputPath); - } - process.env["PATH"] = `${inputPath}${path2.delimiter}${process.env["PATH"]}`; + exports2.exists = exists2; + function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports2.stat(fsPath) : yield exports2.lstat(fsPath); + return stats.isDirectory(); + }); } - exports2.addPath = addPath; - function getInput2(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; + exports2.isDirectory = isDirectory; + function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); } - return val.trim(); - } - exports2.getInput = getInput2; - function getMultilineInput2(name, options) { - const inputs = getInput2(name, options).split("\n").filter((x2) => x2 !== ""); - if (options && options.trimWhitespace === false) { - return inputs; + if (exports2.IS_WINDOWS) { + return p.startsWith("\\") || /^[A-Z]:/i.test(p); } - return inputs.map((input) => input.trim()); + return p.startsWith("/"); } - exports2.getMultilineInput = getMultilineInput2; - function getBooleanInput(name, options) { - const trueValue = ["true", "True", "TRUE"]; - const falseValue = ["false", "False", "FALSE"]; - const val = getInput2(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name} -Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); + exports2.isRooted = isRooted; + function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = void 0; + try { + stats = yield exports2.stat(filePath); + } catch (err) { + if (err.code !== "ENOENT") { + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports2.IS_WINDOWS) { + const upperExt = path2.extname(filePath).toUpperCase(); + if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = void 0; + try { + stats = yield exports2.stat(filePath); + } catch (err) { + if (err.code !== "ENOENT") { + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports2.IS_WINDOWS) { + try { + const directory = path2.dirname(filePath); + const upperName = path2.basename(filePath).toUpperCase(); + for (const actualName of yield exports2.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path2.join(directory, actualName); + break; + } + } + } catch (err) { + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ""; + }); } - exports2.getBooleanInput = getBooleanInput; - 
function setOutput(name, value) { - const filePath = process.env["GITHUB_OUTPUT"] || ""; - if (filePath) { - return file_command_1.issueFileCommand("OUTPUT", file_command_1.prepareKeyValueMessage(name, value)); + exports2.tryGetExecutablePath = tryGetExecutablePath; + function normalizeSeparators(p) { + p = p || ""; + if (exports2.IS_WINDOWS) { + p = p.replace(/\//g, "\\"); + return p.replace(/\\\\+/g, "\\"); } - process.stdout.write(os2.EOL); - command_1.issueCommand("set-output", { name }, utils_1.toCommandValue(value)); - } - exports2.setOutput = setOutput; - function setCommandEcho(enabled) { - command_1.issue("echo", enabled ? "on" : "off"); - } - exports2.setCommandEcho = setCommandEcho; - function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); - } - exports2.setFailed = setFailed; - function isDebug() { - return process.env["RUNNER_DEBUG"] === "1"; + return p.replace(/\/\/+/g, "/"); } - exports2.isDebug = isDebug; - function debug5(message) { - command_1.issueCommand("debug", {}, message); + function isUnixExecutable(stats) { + return (stats.mode & 1) > 0 || (stats.mode & 8) > 0 && stats.gid === process.getgid() || (stats.mode & 64) > 0 && stats.uid === process.getuid(); } - exports2.debug = debug5; - function error(message, properties = {}) { - command_1.issueCommand("error", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + function getCmdPath() { + var _a3; + return (_a3 = process.env["COMSPEC"]) !== null && _a3 !== void 0 ? _a3 : `cmd.exe`; } - exports2.error = error; - function warning(message, properties = {}) { - command_1.issueCommand("warning", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + exports2.getCmdPath = getCmdPath; + } +}); + +// node_modules/@actions/io/lib/io.js +var require_io = __commonJS({ + "node_modules/@actions/io/lib/io.js"(exports2) { + "use strict"; + var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { + return m2[k]; + } }); + } : function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m2[k]; + }); + var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + } + __setModuleDefault(result, mod); + return result; + }; + var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e2) { + reject(e2); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e2) { + reject(e2); + } + } + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; + var assert_1 = require("assert"); + var path2 = __importStar(require("path")); + var ioUtil = __importStar(require_io_util()); + function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + if (destStat && destStat.isFile() && !force) { + return; + } + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path2.join(dest, path2.basename(source)) : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } else { + yield cpDirRecursive(source, newDest, 0, force); + } + } else { + if (path2.relative(source, newDest) === "") { + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); } - exports2.warning = warning; - function notice(message, properties = {}) { - command_1.issueCommand("notice", utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); + exports2.cp = cp; + function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + dest = path2.join(dest, path2.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } else { + throw new Error("Destination already exists"); + } + } + } + yield mkdirP(path2.dirname(dest)); + yield ioUtil.rename(source, dest); + }); } - exports2.notice = notice; - function info(message) { - process.stdout.write(message + os2.EOL); + exports2.mv = mv; + function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); + } + } + try { + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); } - exports2.info = info; - function startGroup(name) { - command_1.issue("group", name); + exports2.rmRF = rmRF; + function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, "a path argument must be provided"); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); } - exports2.startGroup = startGroup; - function endGroup() { - command_1.issue("endgroup"); + exports2.mkdirP = mkdirP; + function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: 
${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + return result; + } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ""; + }); } - exports2.endGroup = endGroup; - function group(name, fn) { + exports2.which = which; + function findInPath(tool) { return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } finally { - endGroup(); + if (!tool) { + throw new Error("parameter 'tool' is required"); } - return result; + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { + for (const extension of process.env["PATHEXT"].split(path2.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; + } + return []; + } + if (tool.includes(path2.sep)) { + return []; + } + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path2.delimiter)) { + if (p) { + directories.push(p); + } + } + } + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path2.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; }); } - exports2.group = group; - function saveState(name, value) { - const filePath = process.env["GITHUB_STATE"] || ""; - if (filePath) { - return file_command_1.issueFileCommand("STATE", file_command_1.prepareKeyValueMessage(name, value)); - } - command_1.issueCommand("save-state", { name }, utils_1.toCommandValue(value)); + exports2.findInPath = findInPath; + function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null ? 
true : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; } - exports2.saveState = saveState; - function getState(name) { - return process.env[`STATE_${name}`] || ""; + function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } else { + yield copyFile(srcFile, destFile, force); + } + } + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); } - exports2.getState = getState; - function getIDToken(aud) { + function copyFile(srcFile, destFile, force) { return __awaiter(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } catch (e2) { + if (e2.code === "EPERM") { + yield ioUtil.chmod(destFile, "0666"); + yield ioUtil.unlink(destFile); + } + } + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? "junction" : null); + } else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } }); } - exports2.getIDToken = getIDToken; - var summary_1 = require_summary(); - Object.defineProperty(exports2, "summary", { enumerable: true, get: function() { - return summary_1.summary; - } }); - var summary_2 = require_summary(); - Object.defineProperty(exports2, "markdownSummary", { enumerable: true, get: function() { - return summary_2.markdownSummary; - } }); - var path_utils_1 = require_path_utils(); - Object.defineProperty(exports2, "toPosixPath", { enumerable: true, get: function() { - return path_utils_1.toPosixPath; - } }); - Object.defineProperty(exports2, "toWin32Path", { enumerable: true, get: function() { - return path_utils_1.toWin32Path; - } }); - Object.defineProperty(exports2, "toPlatformPath", { enumerable: true, get: function() { - return path_utils_1.toPlatformPath; - } }); } }); -// node_modules/ms/index.js -var require_ms = __commonJS({ - "node_modules/ms/index.js"(exports2, module2) { +// node_modules/@actions/exec/lib/toolrunner.js +var require_toolrunner = __commonJS({ + "node_modules/@actions/exec/lib/toolrunner.js"(exports2) { "use strict"; - var s2 = 1e3; - var m2 = s2 * 60; - var h2 = m2 * 60; - var d = h2 * 24; - var w = d * 7; - var y = d * 365.25; - module2.exports = function(val, options) { - options = options || {}; - var type = typeof val; - if (type === "string" && val.length > 0) { - return parse3(val); - } else if (type === "number" && isFinite(val)) { - return options.long ? fmtLong(val) : fmtShort(val); + var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { + return m2[k]; + } }); + } : function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m2[k]; + }); + var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } - throw new Error( - "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) - ); + __setModuleDefault(result, mod); + return result; }; - function parse3(str) { - str = String(str); - if (str.length > 100) { - return; + var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( - str - ); - if (!match2) { - return; + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e2) { + reject(e2); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e2) { + reject(e2); + } + } + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.argStringToArray = exports2.ToolRunner = void 0; + var os2 = __importStar(require("os")); + var events = __importStar(require("events")); + var child = __importStar(require("child_process")); + var path2 = __importStar(require("path")); + var io = __importStar(require_io()); + var ioUtil = __importStar(require_io_util()); + var timers_1 = require("timers"); + var IS_WINDOWS = process.platform === "win32"; + var ToolRunner = class extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; } - var n = parseFloat(match2[1]); - var type = (match2[2] || "ms").toLowerCase(); - switch (type) { - case "years": - case "year": - case "yrs": - case "yr": - case "y": - return n * y; - case "weeks": - case "week": - case "w": - return n * w; - case "days": - case "day": - case "d": - return n * d; - case "hours": - case "hour": - case "hrs": - case "hr": - case "h": - return n * h2; - case "minutes": - case "minute": - case "mins": - case "min": - case "m": - return n * m2; - case "seconds": - case "second": - case "secs": - case "sec": - case "s": - return n * s2; - case "milliseconds": - case "millisecond": - case "msecs": - case "msec": - case "ms": - return n; - default: - return void 0; + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } } - } - function fmtShort(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return Math.round(ms / d) + "d"; + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
"" : "[command]"; + if (IS_WINDOWS) { + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } else { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; } - if (msAbs >= h2) { - return Math.round(ms / h2) + "h"; + _processLineBuffer(data, strBuffer, onLine) { + try { + let s2 = strBuffer + data.toString(); + let n = s2.indexOf(os2.EOL); + while (n > -1) { + const line = s2.substring(0, n); + onLine(line); + s2 = s2.substring(n + os2.EOL.length); + n = s2.indexOf(os2.EOL); + } + return s2; + } catch (err) { + this._debug(`error processing line. Failed with error ${err}`); + return ""; + } } - if (msAbs >= m2) { - return Math.round(ms / m2) + "m"; + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env["COMSPEC"] || "cmd.exe"; + } + } + return this.toolPath; } - if (msAbs >= s2) { - return Math.round(ms / s2) + "s"; + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += " "; + argline += options.windowsVerbatimArguments ? a : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; } - return ms + "ms"; - } - function fmtLong(ms) { - var msAbs = Math.abs(ms); - if (msAbs >= d) { - return plural(ms, msAbs, d, "day"); + _endsWith(str, end) { + return str.endsWith(end); } - if (msAbs >= h2) { - return plural(ms, msAbs, h2, "hour"); + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return this._endsWith(upperToolPath, ".CMD") || this._endsWith(upperToolPath, ".BAT"); } - if (msAbs >= m2) { - return plural(ms, msAbs, m2, "minute"); + _windowsQuoteCmdArg(arg) { + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + if (!arg) { + return '""'; + } + const cmdSpecialChars = [ + " ", + " ", + "&", + "(", + ")", + "[", + "]", + "{", + "}", + "^", + "=", + ";", + "!", + "'", + "+", + ",", + "`", + "~", + "|", + "<", + ">", + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some((x2) => x2 === char)) { + needsQuotes = true; + break; + } + } + if (!needsQuotes) { + return arg; + } + let reverse = '"'; + let quoteHit = true; + for (let i2 = arg.length; i2 > 0; i2--) { + reverse += arg[i2 - 1]; + if (quoteHit && arg[i2 - 1] === "\\") { + reverse += "\\"; + } else if (arg[i2 - 1] === '"') { + quoteHit = true; + reverse += '"'; + } else { + quoteHit = false; + } + } + reverse += '"'; + return reverse.split("").reverse().join(""); } - if (msAbs >= s2) { - return plural(ms, msAbs, s2, "second"); + _uvQuoteCmdArg(arg) { + if (!arg) { + return '""'; + } + if (!arg.includes(" ") && !arg.includes(" ") && !arg.includes('"')) { + return arg; + } + if (!arg.includes('"') && !arg.includes("\\")) { + return `"${arg}"`; + } + let reverse = '"'; + let quoteHit = true; + for (let i2 = arg.length; i2 > 0; i2--) { + reverse += arg[i2 - 1]; + if (quoteHit && arg[i2 - 1] === "\\") { + reverse += "\\"; + } else if (arg[i2 - 1] === '"') { + quoteHit = true; + reverse += "\\"; + } else { + quoteHit = false; + } + } + reverse += '"'; + return reverse.split("").reverse().join(""); + } + _cloneExecOptions(options) { + options 
= options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 1e4 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; } - return ms + " ms"; - } - function plural(ms, msAbs, n, name) { - var isPlural = msAbs >= n * 1.5; - return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); - } - } -}); - -// node_modules/debug/src/common.js -var require_common = __commonJS({ - "node_modules/debug/src/common.js"(exports2, module2) { - "use strict"; - function setup(env2) { - createDebug.debug = createDebug; - createDebug.default = createDebug; - createDebug.coerce = coerce; - createDebug.disable = disable; - createDebug.enable = enable; - createDebug.enabled = enabled; - createDebug.humanize = require_ms(); - createDebug.destroy = destroy; - Object.keys(env2).forEach((key) => { - createDebug[key] = env2[key]; - }); - createDebug.names = []; - createDebug.skips = []; - createDebug.formatters = {}; - function selectColor(namespace) { - let hash = 0; - for (let i2 = 0; i2 < namespace.length; i2++) { - hash = (hash << 5) - hash + namespace.charCodeAt(i2); - hash |= 0; + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result["windowsVerbatimArguments"] = options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; } - return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + return result; } - createDebug.selectColor = selectColor; - function createDebug(namespace) { - let prevTime; - let enableOverride = null; - let namespacesCache; - let enabledCache; - function debug5(...args) { - if (!debug5.enabled) { - return; - } - const self2 = debug5; - const curr = Number(/* @__PURE__ */ new Date()); - const ms = curr - (prevTime || curr); - self2.diff = ms; - self2.prev = prevTime; - self2.curr = curr; - prevTime = curr; - args[0] = createDebug.coerce(args[0]); - if (typeof args[0] !== "string") { - args.unshift("%O"); + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { + this.toolPath = path2.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } - let index = 0; - args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format) => { - if (match2 === "%%") { - return "%"; - } - index++; - const formatter = createDebug.formatters[format]; - if (typeof formatter === "function") { - const val = args[index]; - match2 = formatter.call(self2, val); - args.splice(index, 1); - index--; + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug("arguments:"); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os2.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on("debug", (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ""; + if (cp.stdout) { + cp.stdout.on("data", (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); } - return match2; - }); - createDebug.formatArgs.call(self2, args); - const logFn = self2.log || createDebug.log; - logFn.apply(self2, args); - } - debug5.namespace = namespace; - debug5.useColors = createDebug.useColors(); - debug5.color = createDebug.selectColor(namespace); - debug5.extend = extend; - debug5.destroy = createDebug.destroy; - Object.defineProperty(debug5, "enabled", { - enumerable: true, - configurable: false, - get: () => { - if (enableOverride !== null) { - return enableOverride; + let errbuffer = ""; + if (cp.stderr) { + cp.stderr.on("data", (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && optionsNonNull.errStream && optionsNonNull.outStream) { + const s2 = optionsNonNull.failOnStdErr ? 
optionsNonNull.errStream : optionsNonNull.outStream; + s2.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); } - if (namespacesCache !== createDebug.namespaces) { - namespacesCache = createDebug.namespaces; - enabledCache = createDebug.enabled(namespace); + cp.on("error", (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on("exit", (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on("close", (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on("done", (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit("stdline", stdbuffer); + } + if (errbuffer.length > 0) { + this.emit("errline", errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error("child process missing stdin"); + } + cp.stdin.end(this.options.input); } - return enabledCache; - }, - set: (v) => { - enableOverride = v; - } + })); }); - if (typeof createDebug.init === "function") { - createDebug.init(debug5); - } - return debug5; } - function extend(namespace, delimiter) { - const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); - newDebug.log = this.log; - return newDebug; + }; + exports2.ToolRunner = ToolRunner; + function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ""; + function append2(c) { + if (escaped && c !== '"') { + arg += "\\"; + } + arg += c; + escaped = false; } - function enable(namespaces) { - createDebug.save(namespaces); - createDebug.namespaces = namespaces; - createDebug.names = []; - createDebug.skips = []; - let i2; - const split = (typeof namespaces === "string" ? 
namespaces : "").split(/[\s,]+/); - const len = split.length; - for (i2 = 0; i2 < len; i2++) { - if (!split[i2]) { - continue; - } - namespaces = split[i2].replace(/\*/g, ".*?"); - if (namespaces[0] === "-") { - createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$")); + for (let i2 = 0; i2 < argString.length; i2++) { + const c = argString.charAt(i2); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; } else { - createDebug.names.push(new RegExp("^" + namespaces + "$")); + append2(c); } + continue; } - } - function disable() { - const namespaces = [ - ...createDebug.names.map(toNamespace), - ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace) - ].join(","); - createDebug.enable(""); - return namespaces; - } - function enabled(name) { - if (name[name.length - 1] === "*") { - return true; + if (c === "\\" && escaped) { + append2(c); + continue; } - let i2; - let len; - for (i2 = 0, len = createDebug.skips.length; i2 < len; i2++) { - if (createDebug.skips[i2].test(name)) { - return false; - } + if (c === "\\" && inQuotes) { + escaped = true; + continue; } - for (i2 = 0, len = createDebug.names.length; i2 < len; i2++) { - if (createDebug.names[i2].test(name)) { - return true; + if (c === " " && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ""; } + continue; } - return false; - } - function toNamespace(regexp) { - return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*"); - } - function coerce(val) { - if (val instanceof Error) { - return val.stack || val.message; - } - return val; + append2(c); } - function destroy() { - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); + if (arg.length > 0) { + args.push(arg.trim()); } - createDebug.enable(createDebug.load()); - return createDebug; + return args; } - module2.exports = setup; - } -}); - -// node_modules/debug/src/browser.js -var require_browser = __commonJS({ - "node_modules/debug/src/browser.js"(exports2, module2) { - "use strict"; - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load; - exports2.useColors = useColors; - exports2.storage = localstorage(); - exports2.destroy = /* @__PURE__ */ (() => { - let warned = false; - return () => { - if (!warned) { - warned = true; - console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); + exports2.argStringToArray = argStringToArray; + var ExecState = class _ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; + this.processError = ""; + this.processExitCode = 0; + this.processExited = false; + this.processStderr = false; + this.delay = 1e4; + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error("toolPath must not be empty"); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; } - }; - })(); - exports2.colors = [ - "#0000CC", - "#0000FF", - "#0033CC", - "#0033FF", - "#0066CC", - "#0066FF", - "#0099CC", - "#0099FF", - "#00CC00", - "#00CC33", - "#00CC66", - "#00CC99", - "#00CCCC", - "#00CCFF", - "#3300CC", - "#3300FF", - "#3333CC", - "#3333FF", - "#3366CC", - "#3366FF", - "#3399CC", - "#3399FF", - "#33CC00", - "#33CC33", - "#33CC66", - "#33CC99", - "#33CCCC", - "#33CCFF", - "#6600CC", - "#6600FF", - "#6633CC", - "#6633FF", - "#66CC00", - "#66CC33", - "#9900CC", - "#9900FF", - "#9933CC", - "#9933FF", - "#99CC00", - "#99CC33", - "#CC0000", - "#CC0033", - "#CC0066", - "#CC0099", - "#CC00CC", - "#CC00FF", - "#CC3300", - "#CC3333", - "#CC3366", - "#CC3399", - "#CC33CC", - "#CC33FF", - "#CC6600", - "#CC6633", - "#CC9900", - "#CC9933", - "#CCCC00", - "#CCCC33", - "#FF0000", - "#FF0033", - "#FF0066", - "#FF0099", - "#FF00CC", - "#FF00FF", - "#FF3300", - "#FF3333", - "#FF3366", - "#FF3399", - "#FF33CC", - "#FF33FF", - "#FF6600", - "#FF6633", - "#FF9900", - "#FF9933", - "#FFCC00", - "#FFCC33" - ]; - function useColors() { - if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { - return true; - } - if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { - return false; - } - return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 - typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? - // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages - typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker - typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); - } - function formatArgs(args) { - args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? 
"%c " : " ") + "+" + module2.exports.humanize(this.diff); - if (!this.useColors) { - return; } - const c = "color: " + this.color; - args.splice(1, 0, c, "color: inherit"); - let index = 0; - let lastC = 0; - args[0].replace(/%[a-zA-Z%]/g, (match2) => { - if (match2 === "%%") { + CheckComplete() { + if (this.done) { return; } - index++; - if (match2 === "%c") { - lastC = index; - } - }); - args.splice(lastC, 0, c); - } - exports2.log = console.debug || console.log || (() => { - }); - function save(namespaces) { - try { - if (namespaces) { - exports2.storage.setItem("debug", namespaces); - } else { - exports2.storage.removeItem("debug"); + if (this.processClosed) { + this._setResult(); + } else if (this.processExited) { + this.timeout = timers_1.setTimeout(_ExecState.HandleTimeout, this.delay, this); } - } catch (error) { - } - } - function load() { - let r2; - try { - r2 = exports2.storage.getItem("debug"); - } catch (error) { } - if (!r2 && typeof process !== "undefined" && "env" in process) { - r2 = process.env.DEBUG; + _debug(message) { + this.emit("debug", message); } - return r2; - } - function localstorage() { - try { - return localStorage; - } catch (error) { + _setResult() { + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit("done", error, this.processExitCode); } - } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.j = function(v) { - try { - return JSON.stringify(v); - } catch (error) { - return "[UnexpectedJSONParseError]: " + error.message; + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / 1e3} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); } }; } }); -// node_modules/has-flag/index.js -var require_has_flag = __commonJS({ - "node_modules/has-flag/index.js"(exports2, module2) { +// node_modules/@actions/exec/lib/exec.js +var require_exec = __commonJS({ + "node_modules/@actions/exec/lib/exec.js"(exports2) { "use strict"; - module2.exports = (flag, argv = process.argv) => { - const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf("--"); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); + var __createBinding = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { + return m2[k]; + } }); + } : function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m2[k]; + }); + var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + } + __setModuleDefault(result, mod); + return result; + }; + var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e2) { + reject(e2); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e2) { + reject(e2); + } + } + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getExecOutput = exports2.exec = void 0; + var string_decoder_1 = require("string_decoder"); + var tr = __importStar(require_toolrunner()); + function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); + } + exports2.exec = exec; + function getExecOutput(commandLine, args, options) { + var _a2, _b; + return __awaiter(this, void 0, void 0, function* () { + let stdout = ""; + let stderr = ""; + const stdoutDecoder = new string_decoder_1.StringDecoder("utf8"); + const stderrDecoder = new string_decoder_1.StringDecoder("utf8"); + const originalStdoutListener = (_a2 = options === null || options === void 0 ? void 0 : options.listeners) === null || _a2 === void 0 ? void 0 : _a2.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; + const stdErrListener = (data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }; + const stdOutListener = (data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }; + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; + }); + } + exports2.getExecOutput = getExecOutput; } }); -// node_modules/supports-color/index.js -var require_supports_color = __commonJS({ - "node_modules/supports-color/index.js"(exports2, module2) { +// node_modules/@actions/core/lib/platform.js +var require_platform = __commonJS({ + "node_modules/@actions/core/lib/platform.js"(exports2) { "use strict"; - var os2 = require("os"); - var tty2 = require("tty"); - var hasFlag2 = require_has_flag(); - var { env: env2 } = process; - var forceColor; - if (hasFlag2("no-color") || hasFlag2("no-colors") || hasFlag2("color=false") || hasFlag2("color=never")) { - forceColor = 0; - } else if (hasFlag2("color") || hasFlag2("colors") || hasFlag2("color=true") || hasFlag2("color=always")) { - forceColor = 1; - } - if ("FORCE_COLOR" in env2) { - if (env2.FORCE_COLOR === "true") { - forceColor = 1; - } else if (env2.FORCE_COLOR === "false") { - forceColor = 0; - } else { - forceColor = env2.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env2.FORCE_COLOR, 10), 3); + var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m2, k); + if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m2[k]; + } }; } - } - function translateLevel2(level) { - if (level === 0) { - return false; + Object.defineProperty(o, k2, desc); + } : function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m2[k]; + }); + var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + } + __setModuleDefault(result, mod); + return result; + }; + var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e2) { + reject(e2); + } + } + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e2) { + reject(e2); + } + } + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __importDefault = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getDetails = exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0; + var os_1 = __importDefault(require("os")); + var exec = __importStar(require_exec()); + var getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, { + silent: true + }); + const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, { + silent: true + }); return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 + name: name.trim(), + version: version.trim() }; + }); + var getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () { + var _a2, _b, _c, _d; + const { stdout } = yield exec.getExecOutput("sw_vers", void 0, { + silent: true + }); + const version = (_b = (_a2 = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a2 === void 0 ? void 0 : _a2[1]) !== null && _b !== void 0 ? _b : ""; + const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ""; + return { + name, + version + }; + }); + var getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () { + const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], { + silent: true + }); + const [name, version] = stdout.trim().split("\n"); + return { + name, + version + }; + }); + exports2.platform = os_1.default.platform(); + exports2.arch = os_1.default.arch(); + exports2.isWindows = exports2.platform === "win32"; + exports2.isMacOS = exports2.platform === "darwin"; + exports2.isLinux = exports2.platform === "linux"; + function getDetails() { + return __awaiter(this, void 0, void 0, function* () { + return Object.assign(Object.assign({}, yield exports2.isWindows ? getWindowsInfo() : exports2.isMacOS ? getMacOsInfo() : getLinuxInfo()), { + platform: exports2.platform, + arch: exports2.arch, + isWindows: exports2.isWindows, + isMacOS: exports2.isMacOS, + isLinux: exports2.isLinux + }); + }); } - function supportsColor2(haveStream, streamIsTTY) { - if (forceColor === 0) { - return 0; - } - if (hasFlag2("color=16m") || hasFlag2("color=full") || hasFlag2("color=truecolor")) { - return 3; - } - if (hasFlag2("color=256")) { - return 2; + exports2.getDetails = getDetails; + } +}); + +// node_modules/@actions/core/lib/core.js +var require_core = __commonJS({ + "node_modules/@actions/core/lib/core.js"(exports2) { + "use strict"; + var __createBinding = exports2 && exports2.__createBinding || (Object.create ? function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m2, k); + if (!desc || ("get" in desc ? !m2.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { + return m2[k]; + } }; } - if (haveStream && !streamIsTTY && forceColor === void 0) { - return 0; + Object.defineProperty(o, k2, desc); + } : function(o, m2, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m2[k]; + }); + var __setModuleDefault = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); } - const min = forceColor || 0; - if (env2.TERM === "dumb") { - return min; + __setModuleDefault(result, mod); + return result; + }; + var __awaiter = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - if (process.platform === "win32") { - const osRelease = os2.release().split("."); - if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { - return Number(osRelease[2]) >= 14931 ? 3 : 2; + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e2) { + reject(e2); + } } - return 1; - } - if ("CI" in env2) { - if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env2) || env2.CI_NAME === "codeship") { - return 1; + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e2) { + reject(e2); + } } - return min; - } - if ("TEAMCITY_VERSION" in env2) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env2.TEAMCITY_VERSION) ? 1 : 0; - } - if (env2.COLORTERM === "truecolor") { - return 3; - } - if ("TERM_PROGRAM" in env2) { - const version2 = parseInt((env2.TERM_PROGRAM_VERSION || "").split(".")[0], 10); - switch (env2.TERM_PROGRAM) { - case "iTerm.app": - return version2 >= 3 ? 3 : 2; - case "Apple_Terminal": - return 2; + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - } - if (/-256(color)?$/i.test(env2.TERM)) { - return 2; - } - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env2.TERM)) { - return 1; - } - if ("COLORTERM" in env2) { - return 1; - } - return min; - } - function getSupportLevel(stream) { - const level = supportsColor2(stream, stream && stream.isTTY); - return translateLevel2(level); - } - module2.exports = { - supportsColor: getSupportLevel, - stdout: translateLevel2(supportsColor2(true, tty2.isatty(1))), - stderr: translateLevel2(supportsColor2(true, tty2.isatty(2))) + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - } -}); - -// node_modules/debug/src/node.js -var require_node = __commonJS({ - "node_modules/debug/src/node.js"(exports2, module2) { - "use strict"; - var tty2 = require("tty"); - var util = require("util"); - exports2.init = init; - exports2.log = log; - exports2.formatArgs = formatArgs; - exports2.save = save; - exports2.load = load; - exports2.useColors = useColors; - exports2.destroy = util.deprecate( - () => { - }, - "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
- ); - exports2.colors = [6, 2, 3, 4, 5, 1]; - try { - const supportsColor2 = require_supports_color(); - if (supportsColor2 && (supportsColor2.stderr || supportsColor2).level >= 2) { - exports2.colors = [ - 20, - 21, - 26, - 27, - 32, - 33, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 56, - 57, - 62, - 63, - 68, - 69, - 74, - 75, - 76, - 77, - 78, - 79, - 80, - 81, - 92, - 93, - 98, - 99, - 112, - 113, - 128, - 129, - 134, - 135, - 148, - 149, - 160, - 161, - 162, - 163, - 164, - 165, - 166, - 167, - 168, - 169, - 170, - 171, - 172, - 173, - 178, - 179, - 184, - 185, - 196, - 197, - 198, - 199, - 200, - 201, - 202, - 203, - 204, - 205, - 206, - 207, - 208, - 209, - 214, - 215, - 220, - 221 - ]; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.getIDToken = exports2.getState = exports2.saveState = exports2.group = exports2.endGroup = exports2.startGroup = exports2.info = exports2.notice = exports2.warning = exports2.error = exports2.debug = exports2.isDebug = exports2.setFailed = exports2.setCommandEcho = exports2.setOutput = exports2.getBooleanInput = exports2.getMultilineInput = exports2.getInput = exports2.addPath = exports2.setSecret = exports2.exportVariable = exports2.ExitCode = void 0; + var command_1 = require_command(); + var file_command_1 = require_file_command(); + var utils_1 = require_utils5(); + var os2 = __importStar(require("os")); + var path2 = __importStar(require("path")); + var oidc_utils_1 = require_oidc_utils(); + var ExitCode; + (function(ExitCode2) { + ExitCode2[ExitCode2["Success"] = 0] = "Success"; + ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; + })(ExitCode || (exports2.ExitCode = ExitCode = {})); + function exportVariable(name, val) { + const convertedVal = (0, utils_1.toCommandValue)(val); + process.env[name] = convertedVal; + const filePath = process.env["GITHUB_ENV"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("ENV", (0, file_command_1.prepareKeyValueMessage)(name, val)); } - } catch (error) { + (0, command_1.issueCommand)("set-env", { name }, convertedVal); } - exports2.inspectOpts = Object.keys(process.env).filter((key) => { - return /^debug_/i.test(key); - }).reduce((obj, key) => { - const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { - return k.toUpperCase(); - }); - let val = process.env[key]; - if (/^(yes|on|true|enabled)$/i.test(val)) { - val = true; - } else if (/^(no|off|false|disabled)$/i.test(val)) { - val = false; - } else if (val === "null") { - val = null; + exports2.exportVariable = exportVariable; + function setSecret(secret) { + (0, command_1.issueCommand)("add-mask", {}, secret); + } + exports2.setSecret = setSecret; + function addPath(inputPath) { + const filePath = process.env["GITHUB_PATH"] || ""; + if (filePath) { + (0, file_command_1.issueFileCommand)("PATH", inputPath); } else { - val = Number(val); + (0, command_1.issueCommand)("add-path", {}, inputPath); } - obj[prop] = val; - return obj; - }, {}); - function useColors() { - return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty2.isatty(process.stderr.fd); + process.env["PATH"] = `${inputPath}${path2.delimiter}${process.env["PATH"]}`; } - function formatArgs(args) { - const { namespace: name, useColors: useColors2 } = this; - if (useColors2) { - const c = this.color; - const colorCode = "\x1B[3" + (c < 8 ? 
c : "8;5;" + c); - const prefix = ` ${colorCode};1m${name} \x1B[0m`; - args[0] = prefix + args[0].split("\n").join("\n" + prefix); - args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); - } else { - args[0] = getDate() + name + " " + args[0]; + exports2.addPath = addPath; + function getInput2(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); } - function getDate() { - if (exports2.inspectOpts.hideDate) { - return ""; + exports2.getInput = getInput2; + function getMultilineInput2(name, options) { + const inputs = getInput2(name, options).split("\n").filter((x2) => x2 !== ""); + if (options && options.trimWhitespace === false) { + return inputs; } - return (/* @__PURE__ */ new Date()).toISOString() + " "; + return inputs.map((input) => input.trim()); } - function log(...args) { - return process.stderr.write(util.format(...args) + "\n"); + exports2.getMultilineInput = getMultilineInput2; + function getBooleanInput(name, options) { + const trueValue = ["true", "True", "TRUE"]; + const falseValue = ["false", "False", "FALSE"]; + const val = getInput2(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name} +Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); } - function save(namespaces) { - if (namespaces) { - process.env.DEBUG = namespaces; - } else { - delete process.env.DEBUG; + exports2.getBooleanInput = getBooleanInput; + function setOutput(name, value) { + const filePath = process.env["GITHUB_OUTPUT"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("OUTPUT", (0, file_command_1.prepareKeyValueMessage)(name, value)); } + process.stdout.write(os2.EOL); + (0, command_1.issueCommand)("set-output", { name }, (0, utils_1.toCommandValue)(value)); } - function load() { - return process.env.DEBUG; + exports2.setOutput = setOutput; + function setCommandEcho(enabled) { + (0, command_1.issue)("echo", enabled ? "on" : "off"); } - function init(debug5) { - debug5.inspectOpts = {}; - const keys = Object.keys(exports2.inspectOpts); - for (let i2 = 0; i2 < keys.length; i2++) { - debug5.inspectOpts[keys[i2]] = exports2.inspectOpts[keys[i2]]; + exports2.setCommandEcho = setCommandEcho; + function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); + } + exports2.setFailed = setFailed; + function isDebug() { + return process.env["RUNNER_DEBUG"] === "1"; + } + exports2.isDebug = isDebug; + function debug5(message) { + (0, command_1.issueCommand)("debug", {}, message); + } + exports2.debug = debug5; + function error(message, properties = {}) { + (0, command_1.issueCommand)("error", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); + } + exports2.error = error; + function warning(message, properties = {}) { + (0, command_1.issueCommand)("warning", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); + } + exports2.warning = warning; + function notice(message, properties = {}) { + (0, command_1.issueCommand)("notice", (0, utils_1.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); + } + exports2.notice = notice; + function info(message) { + process.stdout.write(message + os2.EOL); + } + exports2.info = info; + function startGroup(name) { + (0, command_1.issue)("group", name); + } + exports2.startGroup = startGroup; + function endGroup() { + (0, command_1.issue)("endgroup"); + } + exports2.endGroup = endGroup; + function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } finally { + endGroup(); + } + return result; + }); + } + exports2.group = group; + function saveState(name, value) { + const filePath = process.env["GITHUB_STATE"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("STATE", (0, file_command_1.prepareKeyValueMessage)(name, value)); } + (0, command_1.issueCommand)("save-state", { name }, (0, utils_1.toCommandValue)(value)); } - module2.exports = require_common()(exports2); - var { formatters } = module2.exports; - formatters.o = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" "); - }; - formatters.O = function(v) { - this.inspectOpts.colors = this.useColors; - return util.inspect(v, this.inspectOpts); - }; - } -}); - -// node_modules/debug/src/index.js -var require_src = __commonJS({ - "node_modules/debug/src/index.js"(exports2, module2) { - "use strict"; - if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { - module2.exports = require_browser(); - } else { - module2.exports = require_node(); + exports2.saveState = saveState; + function getState(name) { + return process.env[`STATE_${name}`] || ""; + } + exports2.getState = getState; + function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); } + exports2.getIDToken = getIDToken; + var summary_1 = require_summary(); + Object.defineProperty(exports2, "summary", { enumerable: true, get: function() { + return summary_1.summary; + } }); + var summary_2 = require_summary(); + Object.defineProperty(exports2, "markdownSummary", { enumerable: true, get: function() { + return summary_2.markdownSummary; + } }); + var path_utils_1 = require_path_utils(); + Object.defineProperty(exports2, "toPosixPath", { enumerable: true, get: function() { + return path_utils_1.toPosixPath; + } }); + Object.defineProperty(exports2, "toWin32Path", { enumerable: true, get: function() { + return path_utils_1.toWin32Path; + } }); + Object.defineProperty(exports2, "toPlatformPath", { enumerable: true, get: function() { + return path_utils_1.toPlatformPath; + } }); + exports2.platform = __importStar(require_platform()); } }); -// node_modules/@kwsites/file-exists/dist/src/index.js -var require_src2 = __commonJS({ - "node_modules/@kwsites/file-exists/dist/src/index.js"(exports2) { +// node_modules/ms/index.js +var require_ms = __commonJS({ + "node_modules/ms/index.js"(exports2, module2) { "use strict"; - var __importDefault = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
mod : { "default": mod }; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - var fs_1 = require("fs"); - var debug_1 = __importDefault(require_src()); - var log = debug_1.default("@kwsites/file-exists"); - function check(path2, isFile, isDirectory) { - log(`checking %s`, path2); - try { - const stat2 = fs_1.statSync(path2); - if (stat2.isFile() && isFile) { - log(`[OK] path represents a file`); - return true; - } - if (stat2.isDirectory() && isDirectory) { - log(`[OK] path represents a directory`); - return true; - } - log(`[FAIL] path represents something other than a file or directory`); - return false; - } catch (e2) { - if (e2.code === "ENOENT") { - log(`[FAIL] path is not accessible: %o`, e2); - return false; - } - log(`[FATAL] %o`, e2); - throw e2; + var s2 = 1e3; + var m2 = s2 * 60; + var h2 = m2 * 60; + var d = h2 * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { + options = options || {}; + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse3(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) + ); + }; + function parse3(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match2) { + return; + } + var n = parseFloat(match2[1]); + var type = (match2[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h2; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m2; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s2; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; } } - function exists2(path2, type = exports2.READABLE) { - return check(path2, (type & exports2.FILE) > 0, (type & exports2.FOLDER) > 0); + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + "d"; + } + if (msAbs >= h2) { + return Math.round(ms / h2) + "h"; + } + if (msAbs >= m2) { + return Math.round(ms / m2) + "m"; + } + if (msAbs >= s2) { + return Math.round(ms / s2) + "s"; + } + return ms + "ms"; } - exports2.exists = exists2; - exports2.FILE = 1; - exports2.FOLDER = 2; - exports2.READABLE = exports2.FILE + exports2.FOLDER; - } -}); - -// node_modules/@kwsites/file-exists/dist/index.js -var require_dist = __commonJS({ - "node_modules/@kwsites/file-exists/dist/index.js"(exports2) { - "use strict"; - function __export3(m2) { - for (var p in m2) - if (!exports2.hasOwnProperty(p)) - exports2[p] = m2[p]; + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, "day"); + } + if (msAbs >= h2) { + return plural(ms, msAbs, h2, "hour"); + } + if (msAbs >= m2) { + return plural(ms, msAbs, m2, "minute"); + } + if (msAbs >= s2) { + return plural(ms, msAbs, s2, "second"); + } + return ms + " ms"; + } + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return 
Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); } - Object.defineProperty(exports2, "__esModule", { value: true }); - __export3(require_src2()); } }); -// node_modules/@kwsites/promise-deferred/dist/index.js -var require_dist2 = __commonJS({ - "node_modules/@kwsites/promise-deferred/dist/index.js"(exports2) { +// node_modules/debug/src/common.js +var require_common = __commonJS({ + "node_modules/debug/src/common.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createDeferred = exports2.deferred = void 0; - function deferred2() { - let done; - let fail; - let status = "pending"; - const promise = new Promise((_done, _fail) => { - done = _done; - fail = _fail; + function setup(env2) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require_ms(); + createDebug.destroy = destroy; + Object.keys(env2).forEach((key) => { + createDebug[key] = env2[key]; }); - return { - promise, - done(result) { - if (status === "pending") { - status = "resolved"; - done(result); + createDebug.names = []; + createDebug.skips = []; + createDebug.formatters = {}; + function selectColor(namespace) { + let hash = 0; + for (let i2 = 0; i2 < namespace.length; i2++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i2); + hash |= 0; + } + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + function debug5(...args) { + if (!debug5.enabled) { + return; } - }, - fail(error) { - if (status === "pending") { - status = "rejected"; - fail(error); + const self2 = debug5; + const curr = Number(/* @__PURE__ */ new Date()); + const ms = curr - (prevTime || curr); + self2.diff = ms; + self2.prev = prevTime; + self2.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + if (typeof args[0] !== "string") { + args.unshift("%O"); } - }, - get fulfilled() { - return status !== "pending"; - }, - get status() { - return status; + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format) => { + if (match2 === "%%") { + return "%"; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === "function") { + const val = args[index]; + match2 = formatter.call(self2, val); + args.splice(index, 1); + index--; + } + return match2; + }); + createDebug.formatArgs.call(self2, args); + const logFn = self2.log || createDebug.log; + logFn.apply(self2, args); } - }; - } - exports2.deferred = deferred2; - exports2.createDeferred = deferred2; - exports2.default = deferred2; - } -}); - -// node_modules/locate-path/node_modules/path-exists/index.js -var require_path_exists = __commonJS({ - "node_modules/locate-path/node_modules/path-exists/index.js"(exports2, module2) { - "use strict"; - var fs2 = require("fs"); - module2.exports = (fp) => new Promise((resolve) => { - fs2.access(fp, (err) => { - resolve(!err); - }); - }); - module2.exports.sync = (fp) => { - try { - fs2.accessSync(fp); - return true; - } catch (err) { - return false; + debug5.namespace = namespace; + debug5.useColors = createDebug.useColors(); + debug5.color = createDebug.selectColor(namespace); + debug5.extend = extend; + debug5.destroy = createDebug.destroy; + 
Object.defineProperty(debug5, "enabled", { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + return enabledCache; + }, + set: (v) => { + enableOverride = v; + } + }); + if (typeof createDebug.init === "function") { + createDebug.init(debug5); + } + return debug5; } - }; - } -}); - -// node_modules/p-try/index.js -var require_p_try = __commonJS({ - "node_modules/p-try/index.js"(exports2, module2) { - "use strict"; - module2.exports = (cb) => new Promise((resolve) => { - resolve(cb()); - }); - } -}); - -// node_modules/p-locate/node_modules/p-limit/index.js -var require_p_limit = __commonJS({ - "node_modules/p-locate/node_modules/p-limit/index.js"(exports2, module2) { - "use strict"; - var pTry = require_p_try(); - module2.exports = (concurrency) => { - if (concurrency < 1) { - throw new TypeError("Expected `concurrency` to be a number from 1 and up"); + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; } - const queue = []; - let activeCount = 0; - const next = () => { - activeCount--; - if (queue.length > 0) { - queue.shift()(); - } - }; - return (fn) => new Promise((resolve, reject) => { - const run2 = () => { - activeCount++; - pTry(fn).then( - (val) => { - resolve(val); - next(); - }, - (err) => { - reject(err); - next(); - } - ); - }; - if (activeCount < concurrency) { - run2(); - } else { - queue.push(run2); + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; + let i2; + const split = (typeof namespaces === "string" ? namespaces : "").split(/[\s,]+/); + const len = split.length; + for (i2 = 0; i2 < len; i2++) { + if (!split[i2]) { + continue; + } + namespaces = split[i2].replace(/\*/g, ".*?"); + if (namespaces[0] === "-") { + createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$")); + } else { + createDebug.names.push(new RegExp("^" + namespaces + "$")); + } } - }); - }; - } -}); - -// node_modules/p-locate/index.js -var require_p_locate = __commonJS({ - "node_modules/p-locate/index.js"(exports2, module2) { - "use strict"; - var pLimit = require_p_limit(); - var EndError = class extends Error { - constructor(value) { - super(); - this.value = value; } - }; - var finder = (el) => Promise.all(el).then((val) => val[1] === true && Promise.reject(new EndError(val[0]))); - module2.exports = (iterable, tester, opts) => { - opts = Object.assign({ - concurrency: Infinity, - preserveOrder: true - }, opts); - const limit = pLimit(opts.concurrency); - const items = Array.from(iterable).map((el) => [el, limit(() => Promise.resolve(el).then(tester))]); - const checkLimit = pLimit(opts.preserveOrder ? 1 : Infinity); - return Promise.all(items.map((el) => checkLimit(() => finder(el)))).then(() => { - }).catch((err) => err instanceof EndError ? 
err.value : Promise.reject(err)); - }; - } -}); - -// node_modules/locate-path/index.js -var require_locate_path = __commonJS({ - "node_modules/locate-path/index.js"(exports2, module2) { - "use strict"; - var path2 = require("path"); - var pathExists = require_path_exists(); - var pLocate = require_p_locate(); - module2.exports = (iterable, opts) => { - opts = Object.assign({ - cwd: process.cwd() - }, opts); - return pLocate(iterable, (el) => pathExists(path2.resolve(opts.cwd, el)), opts); - }; - module2.exports.sync = (iterable, opts) => { - opts = Object.assign({ - cwd: process.cwd() - }, opts); - for (const el of iterable) { - if (pathExists.sync(path2.resolve(opts.cwd, el))) { - return el; + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace) + ].join(","); + createDebug.enable(""); + return namespaces; + } + function enabled(name) { + if (name[name.length - 1] === "*") { + return true; + } + let i2; + let len; + for (i2 = 0, len = createDebug.skips.length; i2 < len; i2++) { + if (createDebug.skips[i2].test(name)) { + return false; + } + } + for (i2 = 0, len = createDebug.names.length; i2 < len; i2++) { + if (createDebug.names[i2].test(name)) { + return true; + } } + return false; } - }; - } -}); - -// node_modules/find-up/index.js -var require_find_up = __commonJS({ - "node_modules/find-up/index.js"(exports2, module2) { - "use strict"; - var path2 = require("path"); - var locatePath = require_locate_path(); - module2.exports = (filename, opts) => { - opts = opts || {}; - const startDir = path2.resolve(opts.cwd || ""); - const root = path2.parse(startDir).root; - const filenames = [].concat(filename); - return new Promise((resolve) => { - (function find(dir) { - locatePath(filenames, { cwd: dir }).then((file) => { - if (file) { - resolve(path2.join(dir, file)); - } else if (dir === root) { - resolve(null); - } else { - find(path2.dirname(dir)); - } - }); - })(startDir); - }); - }; - module2.exports.sync = (filename, opts) => { - opts = opts || {}; - let dir = path2.resolve(opts.cwd || ""); - const root = path2.parse(dir).root; - const filenames = [].concat(filename); - while (true) { - const file = locatePath.sync(filenames, { cwd: dir }); - if (file) { - return path2.join(dir, file); - } else if (dir === root) { - return null; + function toNamespace(regexp) { + return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*"); + } + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; } - dir = path2.dirname(dir); + return val; } - }; + function destroy() { + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); + } + createDebug.enable(createDebug.load()); + return createDebug; + } + module2.exports = setup; } }); -// node_modules/codeowners/node_modules/ignore/ignore.js -var require_ignore = __commonJS({ - "node_modules/codeowners/node_modules/ignore/ignore.js"(exports2, module2) { +// node_modules/debug/src/browser.js +var require_browser = __commonJS({ + "node_modules/debug/src/browser.js"(exports2, module2) { "use strict"; - var _createClass = /* @__PURE__ */ function() { - function defineProperties(target, props) { - for (var i2 = 0; i2 < props.length; i2++) { - var descriptor = props[i2]; - descriptor.enumerable = descriptor.enumerable || false; - descriptor.configurable = true; - if ("value" in descriptor) - descriptor.writable = true; - Object.defineProperty(target, descriptor.key, descriptor); + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.storage = localstorage(); + exports2.destroy = /* @__PURE__ */ (() => { + let warned = false; + return () => { + if (!warned) { + warned = true; + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); } - } - return function(Constructor, protoProps, staticProps) { - if (protoProps) - defineProperties(Constructor.prototype, protoProps); - if (staticProps) - defineProperties(Constructor, staticProps); - return Constructor; }; - }(); - function _classCallCheck(instance, Constructor) { - if (!(instance instanceof Constructor)) { - throw new TypeError("Cannot call a class as a function"); + })(); + exports2.colors = [ + "#0000CC", + "#0000FF", + "#0033CC", + "#0033FF", + "#0066CC", + "#0066FF", + "#0099CC", + "#0099FF", + "#00CC00", + "#00CC33", + "#00CC66", + "#00CC99", + "#00CCCC", + "#00CCFF", + "#3300CC", + "#3300FF", + "#3333CC", + "#3333FF", + "#3366CC", + "#3366FF", + "#3399CC", + "#3399FF", + "#33CC00", + "#33CC33", + "#33CC66", + "#33CC99", + "#33CCCC", + "#33CCFF", + "#6600CC", + "#6600FF", + "#6633CC", + "#6633FF", + "#66CC00", + "#66CC33", + "#9900CC", + "#9900FF", + "#9933CC", + "#9933FF", + "#99CC00", + "#99CC33", + "#CC0000", + "#CC0033", + "#CC0066", + "#CC0099", + "#CC00CC", + "#CC00FF", + "#CC3300", + "#CC3333", + "#CC3366", + "#CC3399", + "#CC33CC", + "#CC33FF", + "#CC6600", + "#CC6633", + "#CC9900", + "#CC9933", + "#CCCC00", + "#CCCC33", + "#FF0000", + "#FF0033", + "#FF0066", + "#FF0099", + "#FF00CC", + "#FF00FF", + "#FF3300", + "#FF3333", + "#FF3366", + "#FF3399", + "#FF33CC", + "#FF33FF", + "#FF6600", + "#FF6633", + "#FF9900", + "#FF9933", + "#FFCC00", + "#FFCC33" + ]; + function useColors() { + if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { + return true; } + if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? 
+ // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); } - module2.exports = function() { - return new IgnoreBase(); - }; - function make_array(subject) { - return Array.isArray(subject) ? subject : [subject]; - } - var REGEX_BLANK_LINE = /^\s+$/; - var REGEX_LEADING_EXCAPED_EXCLAMATION = /^\\\!/; - var REGEX_LEADING_EXCAPED_HASH = /^\\#/; - var SLASH = "/"; - var KEY_IGNORE = typeof Symbol !== "undefined" ? Symbol.for("node-ignore") : "node-ignore"; - var IgnoreBase = function() { - function IgnoreBase2() { - _classCallCheck(this, IgnoreBase2); - this._rules = []; - this[KEY_IGNORE] = true; - this._initCache(); + function formatArgs(args) { + args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module2.exports.humanize(this.diff); + if (!this.useColors) { + return; } - _createClass(IgnoreBase2, [{ - key: "_initCache", - value: function _initCache() { - this._cache = {}; - } - // @param {Array.|string|Ignore} pattern - }, { - key: "add", - value: function add(pattern) { - this._added = false; - if (typeof pattern === "string") { - pattern = pattern.split(/\r?\n/g); - } - make_array(pattern).forEach(this._addPattern, this); - if (this._added) { - this._initCache(); - } - return this; - } - // legacy - }, { - key: "addPattern", - value: function addPattern(pattern) { - return this.add(pattern); - } - }, { - key: "_addPattern", - value: function _addPattern(pattern) { - if (pattern && pattern[KEY_IGNORE]) { - this._rules = this._rules.concat(pattern._rules); - this._added = true; - return; - } - if (this._checkPattern(pattern)) { - var rule = this._createRule(pattern); - this._added = true; - this._rules.push(rule); - } - } - }, { - key: "_checkPattern", - value: function _checkPattern(pattern) { - return pattern && typeof pattern === "string" && !REGEX_BLANK_LINE.test(pattern) && pattern.indexOf("#") !== 0; - } - }, { - key: "filter", - value: function filter3(paths) { - var _this = this; - return make_array(paths).filter(function(path2) { - return _this._filter(path2); - }); - } - }, { - key: "createFilter", - value: function createFilter() { - var _this2 = this; - return function(path2) { - return _this2._filter(path2); - }; - } - }, { - key: "ignores", - value: function ignores(path2) { - return !this._filter(path2); - } - }, { - key: "_createRule", - value: function _createRule(pattern) { - var origin = pattern; - var negative = false; - if (pattern.indexOf("!") === 0) { - negative = true; - pattern = pattern.substr(1); - } - pattern = pattern.replace(REGEX_LEADING_EXCAPED_EXCLAMATION, "!").replace(REGEX_LEADING_EXCAPED_HASH, "#"); - var regex = make_regex(pattern, negative); - return { - origin, - pattern, - negative, - regex - }; - } - // @returns `Boolean` true if the `path` is NOT ignored - }, { - key: "_filter", - value: function _filter(path2, slices) { - if (!path2) { - return false; - } - if (path2 in this._cache) { - return this._cache[path2]; - } - if (!slices) { - slices = path2.split(SLASH); - } - slices.pop(); - return this._cache[path2] = slices.length ? 
this._filter(slices.join(SLASH) + SLASH, slices) && this._test(path2) : this._test(path2); - } - // @returns {Boolean} true if a file is NOT ignored - }, { - key: "_test", - value: function _test(path2) { - var matched = 0; - this._rules.forEach(function(rule) { - if (!(matched ^ rule.negative)) { - matched = rule.negative ^ rule.regex.test(path2); - } - }); - return !matched; - } - }]); - return IgnoreBase2; - }(); - var DEFAULT_REPLACER_PREFIX = [ - // > Trailing spaces are ignored unless they are quoted with backslash ("\") - [ - // (a\ ) -> (a ) - // (a ) -> (a) - // (a \ ) -> (a ) - /\\?\s+$/, - function(match2) { - return match2.indexOf("\\") === 0 ? " " : ""; - } - ], - // replace (\ ) with ' ' - [/\\\s/g, function() { - return " "; - }], - // Escape metacharacters - // which is written down by users but means special for regular expressions. - // > There are 12 characters with special meanings: - // > - the backslash \, - // > - the caret ^, - // > - the dollar sign $, - // > - the period or dot ., - // > - the vertical bar or pipe symbol |, - // > - the question mark ?, - // > - the asterisk or star *, - // > - the plus sign +, - // > - the opening parenthesis (, - // > - the closing parenthesis ), - // > - and the opening square bracket [, - // > - the opening curly brace {, - // > These special characters are often called "metacharacters". - [/[\\\^$.|?*+()\[{]/g, function(match2) { - return "\\" + match2; - }], - // leading slash - [ - // > A leading slash matches the beginning of the pathname. - // > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". - // A leading slash matches the beginning of the pathname - /^\//, - function() { - return "^"; - } - ], - // replace special metacharacter slash after the leading slash - [/\//g, function() { - return "\\/"; - }], - [ - // > A leading "**" followed by a slash means match in all directories. - // > For example, "**/foo" matches file or directory "foo" anywhere, - // > the same as pattern "foo". - // > "**/foo/bar" matches file or directory "bar" anywhere that is directly under directory "foo". - // Notice that the '*'s have been replaced as '\\*' - /^\^*\\\*\\\*\\\//, - // '**/foo' <-> 'foo' - function() { - return "^(?:.*\\/)?"; - } - ] - ]; - var DEFAULT_REPLACER_SUFFIX = [ - // starting - [ - // there will be no leading '/' (which has been replaced by section "leading slash") - // If starts with '**', adding a '^' to the regular expression also works - /^(?=[^\^])/, - function() { - return !/\/(?!$)/.test(this) ? "(?:^|\\/)" : "^"; - } - ], - // two globstars - [ - // Use lookahead assertions so that we could match more than one `'/**'` - /\\\/\\\*\\\*(?=\\\/|$)/g, - // Zero, one or several directories - // should not use '*', or it will be replaced by the next replacer - // Check if it is not the last `'/**'` - function(match2, index, str) { - return index + 6 < str.length ? "(?:\\/[^\\/]+)*" : "\\/.+"; - } - ], - // intermediate wildcards - [ - // Never replace escaped '*' - // ignore rule '\*' will match the path '*' - // 'abc.*/' -> go - // 'abc.*' -> skip this rule - /(^|[^\\]+)\\\*(?=.+)/g, - // '*.js' matches '.js' - // '*.js' doesn't match 'abc' - function(match2, p1) { - return p1 + "[^\\/]*"; + const c = "color: " + this.color; + args.splice(1, 0, c, "color: inherit"); + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, (match2) => { + if (match2 === "%%") { + return; } - ], - // trailing wildcard - [/(\^|\\\/)?\\\*$/, function(match2, p1) { - return (p1 ? 
p1 + "[^/]+" : "[^/]*") + "(?=$|\\/$)"; - }], - [ - // unescape - /\\\\\\/g, - function() { - return "\\"; + index++; + if (match2 === "%c") { + lastC = index; } - ] - ]; - var POSITIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [ - // 'f' - // matches - // - /f(end) - // - /f/ - // - (start)f(end) - // - (start)f/ - // doesn't match - // - oof - // - foo - // pseudo: - // -> (^|/)f(/|$) - // ending - [ - // 'js' will not match 'js.' - // 'ab' will not match 'abc' - /(?:[^*\/])$/, - // 'js*' will not match 'a.js' - // 'js/' will not match 'a.js' - // 'js' will match 'a.js' and 'a.js/' - function(match2) { - return match2 + "(?=$|\\/)"; + }); + args.splice(lastC, 0, c); + } + exports2.log = console.debug || console.log || (() => { + }); + function save(namespaces) { + try { + if (namespaces) { + exports2.storage.setItem("debug", namespaces); + } else { + exports2.storage.removeItem("debug"); } - ] - ], DEFAULT_REPLACER_SUFFIX); - var NEGATIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [ - // #24, #38 - // The MISSING rule of [gitignore docs](https://git-scm.com/docs/gitignore) - // A negative pattern without a trailing wildcard should not - // re-include the things inside that directory. - // eg: - // ['node_modules/*', '!node_modules'] - // should ignore `node_modules/a.js` - [/(?:[^*])$/, function(match2) { - return match2 + "(?=$|\\/$)"; - }] - ], DEFAULT_REPLACER_SUFFIX); - var cache2 = {}; - function make_regex(pattern, negative) { - var r2 = cache2[pattern]; - if (r2) { - return r2; + } catch (error) { } - var replacers = negative ? NEGATIVE_REPLACERS : POSITIVE_REPLACERS; - var source = replacers.reduce(function(prev, current) { - return prev.replace(current[0], current[1].bind(pattern)); - }, pattern); - return cache2[pattern] = new RegExp(source, "i"); - } - if ( - // Detect `process` so that it can run in browsers. - typeof process !== "undefined" && (process.env && process.env.IGNORE_TEST_WIN32 || process.platform === "win32") - ) { - filter2 = IgnoreBase.prototype._filter; - make_posix = function make_posix2(str) { - return /^\\\\\?\\/.test(str) || /[^\x00-\x80]+/.test(str) ? 
str : str.replace(/\\/g, "/"); - }; - IgnoreBase.prototype._filter = function(path2, slices) { - path2 = make_posix(path2); - return filter2.call(this, path2, slices); - }; } - var filter2; - var make_posix; - } -}); - -// node_modules/is-directory/index.js -var require_is_directory = __commonJS({ - "node_modules/is-directory/index.js"(exports2, module2) { - "use strict"; - var fs2 = require("fs"); - function isDirectory(filepath, cb) { - if (typeof cb !== "function") { - throw new Error("expected a callback function"); + function load() { + let r2; + try { + r2 = exports2.storage.getItem("debug"); + } catch (error) { } - if (typeof filepath !== "string") { - cb(new Error("expected filepath to be a string")); - return; + if (!r2 && typeof process !== "undefined" && "env" in process) { + r2 = process.env.DEBUG; } - fs2.stat(filepath, function(err, stats) { - if (err) { - if (err.code === "ENOENT") { - cb(null, false); - return; - } - cb(err); - return; - } - cb(null, stats.isDirectory()); - }); + return r2; } - isDirectory.sync = function isDirectorySync(filepath) { - if (typeof filepath !== "string") { - throw new Error("expected filepath to be a string"); + function localstorage() { + try { + return localStorage; + } catch (error) { } + } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.j = function(v) { try { - var stat2 = fs2.statSync(filepath); - return stat2.isDirectory(); - } catch (err) { - if (err.code === "ENOENT") { - return false; - } else { - throw err; - } + return JSON.stringify(v); + } catch (error) { + return "[UnexpectedJSONParseError]: " + error.message; } - return false; }; - module2.exports = isDirectory; } }); -// node_modules/fs.realpath/old.js -var require_old = __commonJS({ - "node_modules/fs.realpath/old.js"(exports2) { +// node_modules/has-flag/index.js +var require_has_flag = __commonJS({ + "node_modules/has-flag/index.js"(exports2, module2) { "use strict"; - var pathModule = require("path"); - var isWindows = process.platform === "win32"; - var fs2 = require("fs"); - var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); - function rethrow() { - var callback; - if (DEBUG) { - var backtrace = new Error(); - callback = debugCallback; - } else - callback = missingCallback; - return callback; - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } - } - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; - else if (!process.noDeprecation) { - var msg = "fs: missing callback " + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } - } - } - function maybeCallback(cb) { - return typeof cb === "function" ? cb : rethrow(); + module2.exports = (flag, argv = process.argv) => { + const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? 
"-" : "--"; + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf("--"); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); + }; + } +}); + +// node_modules/supports-color/index.js +var require_supports_color = __commonJS({ + "node_modules/supports-color/index.js"(exports2, module2) { + "use strict"; + var os2 = require("os"); + var tty2 = require("tty"); + var hasFlag2 = require_has_flag(); + var { env: env2 } = process; + var forceColor; + if (hasFlag2("no-color") || hasFlag2("no-colors") || hasFlag2("color=false") || hasFlag2("color=never")) { + forceColor = 0; + } else if (hasFlag2("color") || hasFlag2("colors") || hasFlag2("color=true") || hasFlag2("color=always")) { + forceColor = 1; } - var normalize2 = pathModule.normalize; - if (isWindows) { - nextPartRe = /(.*?)(?:[\/\\]+|$)/g; - } else { - nextPartRe = /(.*?)(?:[\/]+|$)/g; + if ("FORCE_COLOR" in env2) { + if (env2.FORCE_COLOR === "true") { + forceColor = 1; + } else if (env2.FORCE_COLOR === "false") { + forceColor = 0; + } else { + forceColor = env2.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env2.FORCE_COLOR, 10), 3); + } } - var nextPartRe; - if (isWindows) { - splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; - } else { - splitRootRe = /^[\/]*/; + function translateLevel2(level) { + if (level === 0) { + return false; + } + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; } - var splitRootRe; - exports2.realpathSync = function realpathSync(p, cache2) { - p = pathModule.resolve(p); - if (cache2 && Object.prototype.hasOwnProperty.call(cache2, p)) { - return cache2[p]; + function supportsColor2(haveStream, streamIsTTY) { + if (forceColor === 0) { + return 0; } - var original = p, seenLinks = {}, knownHard = {}; - var pos; - var current; - var base; - var previous; - start(); - function start() { - var m2 = splitRootRe.exec(p); - pos = m2[0].length; - current = m2[0]; - base = m2[0]; - previous = ""; - if (isWindows && !knownHard[base]) { - fs2.lstatSync(base); - knownHard[base] = true; - } + if (hasFlag2("color=16m") || hasFlag2("color=full") || hasFlag2("color=truecolor")) { + return 3; } - while (pos < p.length) { - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - if (knownHard[base] || cache2 && cache2[base] === base) { - continue; - } - var resolvedLink; - if (cache2 && Object.prototype.hasOwnProperty.call(cache2, base)) { - resolvedLink = cache2[base]; - } else { - var stat2 = fs2.lstatSync(base); - if (!stat2.isSymbolicLink()) { - knownHard[base] = true; - if (cache2) - cache2[base] = base; - continue; - } - var linkTarget = null; - if (!isWindows) { - var id = stat2.dev.toString(32) + ":" + stat2.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs2.statSync(base); - linkTarget = fs2.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - if (cache2) - cache2[base] = resolvedLink; - if (!isWindows) - seenLinks[id] = linkTarget; - } - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); + if (hasFlag2("color=256")) { + return 2; } - if (cache2) - cache2[original] = p; - return p; - }; - exports2.realpath = function realpath(p, cache2, cb) { - if (typeof cb !== "function") { - cb = maybeCallback(cache2); - cache2 = null; + if (haveStream && !streamIsTTY && 
forceColor === void 0) { + return 0; } - p = pathModule.resolve(p); - if (cache2 && Object.prototype.hasOwnProperty.call(cache2, p)) { - return process.nextTick(cb.bind(null, null, cache2[p])); + const min = forceColor || 0; + if (env2.TERM === "dumb") { + return min; } - var original = p, seenLinks = {}, knownHard = {}; - var pos; - var current; - var base; - var previous; - start(); - function start() { - var m2 = splitRootRe.exec(p); - pos = m2[0].length; - current = m2[0]; - base = m2[0]; - previous = ""; - if (isWindows && !knownHard[base]) { - fs2.lstat(base, function(err) { - if (err) - return cb(err); - knownHard[base] = true; - LOOP(); - }); - } else { - process.nextTick(LOOP); + if (process.platform === "win32") { + const osRelease = os2.release().split("."); + if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; } + return 1; } - function LOOP() { - if (pos >= p.length) { - if (cache2) - cache2[original] = p; - return cb(null, p); - } - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - if (knownHard[base] || cache2 && cache2[base] === base) { - return process.nextTick(LOOP); - } - if (cache2 && Object.prototype.hasOwnProperty.call(cache2, base)) { - return gotResolvedLink(cache2[base]); + if ("CI" in env2) { + if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "GITHUB_ACTIONS", "BUILDKITE"].some((sign) => sign in env2) || env2.CI_NAME === "codeship") { + return 1; } - return fs2.lstat(base, gotStat); + return min; } - function gotStat(err, stat2) { - if (err) - return cb(err); - if (!stat2.isSymbolicLink()) { - knownHard[base] = true; - if (cache2) - cache2[base] = base; - return process.nextTick(LOOP); - } - if (!isWindows) { - var id = stat2.dev.toString(32) + ":" + stat2.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } + if ("TEAMCITY_VERSION" in env2) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env2.TEAMCITY_VERSION) ? 1 : 0; + } + if (env2.COLORTERM === "truecolor") { + return 3; + } + if ("TERM_PROGRAM" in env2) { + const version = parseInt((env2.TERM_PROGRAM_VERSION || "").split(".")[0], 10); + switch (env2.TERM_PROGRAM) { + case "iTerm.app": + return version >= 3 ? 
3 : 2; + case "Apple_Terminal": + return 2; } - fs2.stat(base, function(err2) { - if (err2) - return cb(err2); - fs2.readlink(base, function(err3, target) { - if (!isWindows) - seenLinks[id] = target; - gotTarget(err3, target); - }); - }); } - function gotTarget(err, target, base2) { - if (err) - return cb(err); - var resolvedLink = pathModule.resolve(previous, target); - if (cache2) - cache2[base2] = resolvedLink; - gotResolvedLink(resolvedLink); + if (/-256(color)?$/i.test(env2.TERM)) { + return 2; } - function gotResolvedLink(resolvedLink) { - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env2.TERM)) { + return 1; } + if ("COLORTERM" in env2) { + return 1; + } + return min; + } + function getSupportLevel(stream) { + const level = supportsColor2(stream, stream && stream.isTTY); + return translateLevel2(level); + } + module2.exports = { + supportsColor: getSupportLevel, + stdout: translateLevel2(supportsColor2(true, tty2.isatty(1))), + stderr: translateLevel2(supportsColor2(true, tty2.isatty(2))) }; } }); -// node_modules/fs.realpath/index.js -var require_fs = __commonJS({ - "node_modules/fs.realpath/index.js"(exports2, module2) { +// node_modules/debug/src/node.js +var require_node = __commonJS({ + "node_modules/debug/src/node.js"(exports2, module2) { "use strict"; - module2.exports = realpath; - realpath.realpath = realpath; - realpath.sync = realpathSync; - realpath.realpathSync = realpathSync; - realpath.monkeypatch = monkeypatch; - realpath.unmonkeypatch = unmonkeypatch; - var fs2 = require("fs"); - var origRealpath = fs2.realpath; - var origRealpathSync = fs2.realpathSync; - var version2 = process.version; - var ok = /^v[0-5]\./.test(version2); - var old = require_old(); - function newError(er) { - return er && er.syscall === "realpath" && (er.code === "ELOOP" || er.code === "ENOMEM" || er.code === "ENAMETOOLONG"); - } - function realpath(p, cache2, cb) { - if (ok) { - return origRealpath(p, cache2, cb); - } - if (typeof cache2 === "function") { - cb = cache2; - cache2 = null; + var tty2 = require("tty"); + var util = require("util"); + exports2.init = init; + exports2.log = log; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.destroy = util.deprecate( + () => { + }, + "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." 
+ ); + exports2.colors = [6, 2, 3, 4, 5, 1]; + try { + const supportsColor2 = require_supports_color(); + if (supportsColor2 && (supportsColor2.stderr || supportsColor2).level >= 2) { + exports2.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; } - origRealpath(p, cache2, function(er, result) { - if (newError(er)) { - old.realpath(p, cache2, cb); - } else { - cb(er, result); - } + } catch (error) { + } + exports2.inspectOpts = Object.keys(process.env).filter((key) => { + return /^debug_/i.test(key); + }).reduce((obj, key) => { + const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); }); + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === "null") { + val = null; + } else { + val = Number(val); + } + obj[prop] = val; + return obj; + }, {}); + function useColors() { + return "colors" in exports2.inspectOpts ? Boolean(exports2.inspectOpts.colors) : tty2.isatty(process.stderr.fd); } - function realpathSync(p, cache2) { - if (ok) { - return origRealpathSync(p, cache2); + function formatArgs(args) { + const { namespace: name, useColors: useColors2 } = this; + if (useColors2) { + const c = this.color; + const colorCode = "\x1B[3" + (c < 8 ? 
c : "8;5;" + c); + const prefix = ` ${colorCode};1m${name} \x1B[0m`; + args[0] = prefix + args[0].split("\n").join("\n" + prefix); + args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + " " + args[0]; } - try { - return origRealpathSync(p, cache2); - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache2); - } else { - throw er; - } + } + function getDate() { + if (exports2.inspectOpts.hideDate) { + return ""; } + return (/* @__PURE__ */ new Date()).toISOString() + " "; } - function monkeypatch() { - fs2.realpath = realpath; - fs2.realpathSync = realpathSync; + function log(...args) { + return process.stderr.write(util.format(...args) + "\n"); } - function unmonkeypatch() { - fs2.realpath = origRealpath; - fs2.realpathSync = origRealpathSync; + function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + delete process.env.DEBUG; + } } - } -}); - -// node_modules/concat-map/index.js -var require_concat_map = __commonJS({ - "node_modules/concat-map/index.js"(exports2, module2) { - "use strict"; - module2.exports = function(xs, fn) { - var res = []; - for (var i2 = 0; i2 < xs.length; i2++) { - var x2 = fn(xs[i2], i2); - if (isArray(x2)) - res.push.apply(res, x2); - else - res.push(x2); + function load() { + return process.env.DEBUG; + } + function init(debug5) { + debug5.inspectOpts = {}; + const keys = Object.keys(exports2.inspectOpts); + for (let i2 = 0; i2 < keys.length; i2++) { + debug5.inspectOpts[keys[i2]] = exports2.inspectOpts[keys[i2]]; } - return res; + } + module2.exports = require_common()(exports2); + var { formatters } = module2.exports; + formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" "); }; - var isArray = Array.isArray || function(xs) { - return Object.prototype.toString.call(xs) === "[object Array]"; + formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); }; } }); -// node_modules/balanced-match/index.js -var require_balanced_match = __commonJS({ - "node_modules/balanced-match/index.js"(exports2, module2) { +// node_modules/debug/src/index.js +var require_src = __commonJS({ + "node_modules/debug/src/index.js"(exports2, module2) { "use strict"; - module2.exports = balanced; - function balanced(a, b, str) { - if (a instanceof RegExp) - a = maybeMatch(a, str); - if (b instanceof RegExp) - b = maybeMatch(b, str); - var r2 = range(a, b, str); - return r2 && { - start: r2[0], - end: r2[1], - pre: str.slice(0, r2[0]), - body: str.slice(r2[0] + a.length, r2[1]), - post: str.slice(r2[1] + b.length) - }; - } - function maybeMatch(reg, str) { - var m2 = str.match(reg); - return m2 ? m2[0] : null; - } - balanced.range = range; - function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i2 = ai; - if (ai >= 0 && bi > 0) { - if (a === b) { - return [ai, bi]; - } - begs = []; - left = str.length; - while (i2 >= 0 && !result) { - if (i2 == ai) { - begs.push(i2); - ai = str.indexOf(a, i2 + 1); - } else if (begs.length == 1) { - result = [begs.pop(), bi]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } - bi = str.indexOf(b, i2 + 1); - } - i2 = ai < bi && ai >= 0 ? 
ai : bi; - } - if (begs.length) { - result = [left, right]; - } - } - return result; + if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { + module2.exports = require_browser(); + } else { + module2.exports = require_node(); } } }); -// node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js -var require_brace_expansion = __commonJS({ - "node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js"(exports2, module2) { +// node_modules/@kwsites/file-exists/dist/src/index.js +var require_src2 = __commonJS({ + "node_modules/@kwsites/file-exists/dist/src/index.js"(exports2) { "use strict"; - var concatMap = require_concat_map(); - var balanced = require_balanced_match(); - module2.exports = expandTop; - var escSlash = "\0SLASH" + Math.random() + "\0"; - var escOpen = "\0OPEN" + Math.random() + "\0"; - var escClose = "\0CLOSE" + Math.random() + "\0"; - var escComma = "\0COMMA" + Math.random() + "\0"; - var escPeriod = "\0PERIOD" + Math.random() + "\0"; - function numeric(str) { - return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0); - } - function escapeBraces(str) { - return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); - } - function unescapeBraces(str) { - return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); - } - function parseCommaParts(str) { - if (!str) - return [""]; - var parts = []; - var m2 = balanced("{", "}", str); - if (!m2) - return str.split(","); - var pre = m2.pre; - var body = m2.body; - var post = m2.post; - var p = pre.split(","); - p[p.length - 1] += "{" + body + "}"; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length - 1] += postParts.shift(); - p.push.apply(p, postParts); - } - parts.push.apply(parts, p); - return parts; - } - function expandTop(str) { - if (!str) - return []; - if (str.substr(0, 2) === "{}") { - str = "\\{\\}" + str.substr(2); - } - return expand3(escapeBraces(str), true).map(unescapeBraces); - } - function embrace(str) { - return "{" + str + "}"; - } - function isPadded(el) { - return /^-?0\d/.test(el); - } - function lte(i2, y) { - return i2 <= y; - } - function gte(i2, y) { - return i2 >= y; - } - function expand3(str, isTop) { - var expansions = []; - var m2 = balanced("{", "}", str); - if (!m2 || /\$$/.test(m2.pre)) - return [str]; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m2.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m2.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m2.body.indexOf(",") >= 0; - if (!isSequence && !isOptions) { - if (m2.post.match(/,.*\}/)) { - str = m2.pre + "{" + m2.body + escClose + m2.post; - return expand3(str); - } - return [str]; - } - var n; - if (isSequence) { - n = m2.body.split(/\.\./); - } else { - n = parseCommaParts(m2.body); - if (n.length === 1) { - n = expand3(n[0], false).map(embrace); - if (n.length === 1) { - var post = m2.post.length ? expand3(m2.post, false) : [""]; - return post.map(function(p) { - return m2.pre + n[0] + p; - }); - } - } - } - var pre = m2.pre; - var post = m2.post.length ? expand3(m2.post, false) : [""]; - var N; - if (isSequence) { - var x2 = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length); - var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; - var test = lte; - var reverse = y < x2; - if (reverse) { - incr *= -1; - test = gte; + var __importDefault = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + var fs_1 = require("fs"); + var debug_1 = __importDefault(require_src()); + var log = debug_1.default("@kwsites/file-exists"); + function check(path2, isFile, isDirectory) { + log(`checking %s`, path2); + try { + const stat2 = fs_1.statSync(path2); + if (stat2.isFile() && isFile) { + log(`[OK] path represents a file`); + return true; } - var pad = n.some(isPadded); - N = []; - for (var i2 = x2; test(i2, y); i2 += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i2); - if (c === "\\") - c = ""; - } else { - c = String(i2); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join("0"); - if (i2 < 0) - c = "-" + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); + if (stat2.isDirectory() && isDirectory) { + log(`[OK] path represents a directory`); + return true; } - } else { - N = concatMap(n, function(el) { - return expand3(el, false); - }); - } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); + log(`[FAIL] path represents something other than a file or directory`); + return false; + } catch (e2) { + if (e2.code === "ENOENT") { + log(`[FAIL] path is not accessible: %o`, e2); + return false; } + log(`[FATAL] %o`, e2); + throw e2; } - return expansions; } + function exists2(path2, type = exports2.READABLE) { + return check(path2, (type & exports2.FILE) > 0, (type & exports2.FOLDER) > 0); + } + exports2.exists = exists2; + exports2.FILE = 1; + exports2.FOLDER = 2; + exports2.READABLE = exports2.FILE + exports2.FOLDER; } }); -// node_modules/glob/node_modules/minimatch/minimatch.js -var require_minimatch = __commonJS({ - "node_modules/glob/node_modules/minimatch/minimatch.js"(exports2, module2) { +// node_modules/@kwsites/file-exists/dist/index.js +var require_dist = __commonJS({ + "node_modules/@kwsites/file-exists/dist/index.js"(exports2) { "use strict"; - module2.exports = minimatch2; - minimatch2.Minimatch = Minimatch2; - var path2 = function() { - try { - return require("path"); - } catch (e2) { - } - }() || { - sep: "/" - }; - minimatch2.sep = path2.sep; - var GLOBSTAR2 = minimatch2.GLOBSTAR = Minimatch2.GLOBSTAR = {}; - var expand3 = require_brace_expansion(); - var plTypes = { - "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, - "?": { open: "(?:", close: ")?" 
}, - "+": { open: "(?:", close: ")+" }, - "*": { open: "(?:", close: ")*" }, - "@": { open: "(?:", close: ")" } - }; - var qmark3 = "[^/]"; - var star3 = qmark3 + "*?"; - var twoStarDot2 = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; - var twoStarNoDot2 = "(?:(?!(?:\\/|^)\\.).)*?"; - var reSpecials2 = charSet("().*{}+?[]^$\\!"); - function charSet(s2) { - return s2.split("").reduce(function(set2, c) { - set2[c] = true; - return set2; - }, {}); + function __export3(m2) { + for (var p in m2) if (!exports2.hasOwnProperty(p)) exports2[p] = m2[p]; } - var slashSplit = /\/+/; - minimatch2.filter = filter2; - function filter2(pattern, options) { + Object.defineProperty(exports2, "__esModule", { value: true }); + __export3(require_src2()); + } +}); + +// node_modules/simple-git/node_modules/debug/node_modules/ms/index.js +var require_ms2 = __commonJS({ + "node_modules/simple-git/node_modules/debug/node_modules/ms/index.js"(exports2, module2) { + "use strict"; + var s2 = 1e3; + var m2 = s2 * 60; + var h2 = m2 * 60; + var d = h2 * 24; + var w = d * 7; + var y = d * 365.25; + module2.exports = function(val, options) { options = options || {}; - return function(p, i2, list) { - return minimatch2(p, pattern, options); - }; - } - function ext2(a, b) { - b = b || {}; - var t2 = {}; - Object.keys(a).forEach(function(k) { - t2[k] = a[k]; - }); - Object.keys(b).forEach(function(k) { - t2[k] = b[k]; - }); - return t2; - } - minimatch2.defaults = function(def) { - if (!def || typeof def !== "object" || !Object.keys(def).length) { - return minimatch2; - } - var orig = minimatch2; - var m2 = function minimatch3(p, pattern, options) { - return orig(p, pattern, ext2(def, options)); - }; - m2.Minimatch = function Minimatch3(pattern, options) { - return new orig.Minimatch(pattern, ext2(def, options)); - }; - m2.Minimatch.defaults = function defaults2(options) { - return orig.defaults(ext2(def, options)).Minimatch; - }; - m2.filter = function filter3(pattern, options) { - return orig.filter(pattern, ext2(def, options)); - }; - m2.defaults = function defaults2(options) { - return orig.defaults(ext2(def, options)); - }; - m2.makeRe = function makeRe3(pattern, options) { - return orig.makeRe(pattern, ext2(def, options)); - }; - m2.braceExpand = function braceExpand3(pattern, options) { - return orig.braceExpand(pattern, ext2(def, options)); - }; - m2.match = function(list, pattern, options) { - return orig.match(list, pattern, ext2(def, options)); - }; - return m2; - }; - Minimatch2.defaults = function(def) { - return minimatch2.defaults(def).Minimatch; - }; - function minimatch2(p, pattern, options) { - assertValidPattern2(pattern); - if (!options) - options = {}; - if (!options.nocomment && pattern.charAt(0) === "#") { - return false; - } - return new Minimatch2(pattern, options).match(p); - } - function Minimatch2(pattern, options) { - if (!(this instanceof Minimatch2)) { - return new Minimatch2(pattern, options); - } - assertValidPattern2(pattern); - if (!options) - options = {}; - pattern = pattern.trim(); - if (!options.allowWindowsEscape && path2.sep !== "/") { - pattern = pattern.split(path2.sep).join("/"); + var type = typeof val; + if (type === "string" && val.length > 0) { + return parse3(val); + } else if (type === "number" && isFinite(val)) { + return options.long ? 
fmtLong(val) : fmtShort(val); } - this.options = options; - this.set = []; - this.pattern = pattern; - this.regexp = null; - this.negate = false; - this.comment = false; - this.empty = false; - this.partial = !!options.partial; - this.make(); - } - Minimatch2.prototype.debug = function() { + throw new Error( + "val is not a non-empty string or a valid number. val=" + JSON.stringify(val) + ); }; - Minimatch2.prototype.make = make; - function make() { - var pattern = this.pattern; - var options = this.options; - if (!options.nocomment && pattern.charAt(0) === "#") { - this.comment = true; + function parse3(str) { + str = String(str); + if (str.length > 100) { return; } - if (!pattern) { - this.empty = true; + var match2 = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match2) { return; } - this.parseNegate(); - var set2 = this.globSet = this.braceExpand(); - if (options.debug) - this.debug = function debug5() { - console.error.apply(console, arguments); - }; - this.debug(this.pattern, set2); - set2 = this.globParts = set2.map(function(s2) { - return s2.split(slashSplit); - }); - this.debug(this.pattern, set2); - set2 = set2.map(function(s2, si, set3) { - return s2.map(this.parse, this); - }, this); - this.debug(this.pattern, set2); - set2 = set2.filter(function(s2) { - return s2.indexOf(false) === -1; - }); - this.debug(this.pattern, set2); - this.set = set2; - } - Minimatch2.prototype.parseNegate = parseNegate; - function parseNegate() { - var pattern = this.pattern; - var negate = false; - var options = this.options; - var negateOffset = 0; - if (options.nonegate) - return; - for (var i2 = 0, l = pattern.length; i2 < l && pattern.charAt(i2) === "!"; i2++) { - negate = !negate; - negateOffset++; + var n = parseFloat(match2[1]); + var type = (match2[2] || "ms").toLowerCase(); + switch (type) { + case "years": + case "year": + case "yrs": + case "yr": + case "y": + return n * y; + case "weeks": + case "week": + case "w": + return n * w; + case "days": + case "day": + case "d": + return n * d; + case "hours": + case "hour": + case "hrs": + case "hr": + case "h": + return n * h2; + case "minutes": + case "minute": + case "mins": + case "min": + case "m": + return n * m2; + case "seconds": + case "second": + case "secs": + case "sec": + case "s": + return n * s2; + case "milliseconds": + case "millisecond": + case "msecs": + case "msec": + case "ms": + return n; + default: + return void 0; } - if (negateOffset) - this.pattern = pattern.substr(negateOffset); - this.negate = negate; } - minimatch2.braceExpand = function(pattern, options) { - return braceExpand2(pattern, options); - }; - Minimatch2.prototype.braceExpand = braceExpand2; - function braceExpand2(pattern, options) { - if (!options) { - if (this instanceof Minimatch2) { - options = this.options; - } else { - options = {}; - } + function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + "d"; } - pattern = typeof pattern === "undefined" ? 
this.pattern : pattern; - assertValidPattern2(pattern); - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - return [pattern]; + if (msAbs >= h2) { + return Math.round(ms / h2) + "h"; + } + if (msAbs >= m2) { + return Math.round(ms / m2) + "m"; + } + if (msAbs >= s2) { + return Math.round(ms / s2) + "s"; } - return expand3(pattern); + return ms + "ms"; } - var MAX_PATTERN_LENGTH2 = 1024 * 64; - var assertValidPattern2 = function(pattern) { - if (typeof pattern !== "string") { - throw new TypeError("invalid pattern"); + function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, "day"); } - if (pattern.length > MAX_PATTERN_LENGTH2) { - throw new TypeError("pattern is too long"); + if (msAbs >= h2) { + return plural(ms, msAbs, h2, "hour"); } - }; - Minimatch2.prototype.parse = parse3; - var SUBPARSE = {}; - function parse3(pattern, isSub) { - assertValidPattern2(pattern); - var options = this.options; - if (pattern === "**") { - if (!options.noglobstar) - return GLOBSTAR2; - else - pattern = "*"; + if (msAbs >= m2) { + return plural(ms, msAbs, m2, "minute"); } - if (pattern === "") - return ""; - var re = ""; - var hasMagic = !!options.nocase; - var escaping = false; - var patternListStack = []; - var negativeLists = []; - var stateChar; - var inClass = false; - var reClassStart = -1; - var classStart = -1; - var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? "(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; - var self2 = this; - function clearStateChar() { - if (stateChar) { - switch (stateChar) { - case "*": - re += star3; - hasMagic = true; - break; - case "?": - re += qmark3; - hasMagic = true; - break; - default: - re += "\\" + stateChar; - break; - } - self2.debug("clearStateChar %j %j", stateChar, re); - stateChar = false; - } + if (msAbs >= s2) { + return plural(ms, msAbs, s2, "second"); } - for (var i2 = 0, len = pattern.length, c; i2 < len && (c = pattern.charAt(i2)); i2++) { - this.debug("%s %s %s %j", pattern, i2, re, c); - if (escaping && reSpecials2[c]) { - re += "\\" + c; - escaping = false; - continue; + return ms + " ms"; + } + function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + " " + name + (isPlural ? "s" : ""); + } + } +}); + +// node_modules/simple-git/node_modules/debug/src/common.js +var require_common2 = __commonJS({ + "node_modules/simple-git/node_modules/debug/src/common.js"(exports2, module2) { + "use strict"; + function setup(env2) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require_ms2(); + createDebug.destroy = destroy; + Object.keys(env2).forEach((key) => { + createDebug[key] = env2[key]; + }); + createDebug.names = []; + createDebug.skips = []; + createDebug.formatters = {}; + function selectColor(namespace) { + let hash = 0; + for (let i2 = 0; i2 < namespace.length; i2++) { + hash = (hash << 5) - hash + namespace.charCodeAt(i2); + hash |= 0; } - switch (c) { - case "/": { - return false; - } - case "\\": - clearStateChar(); - escaping = true; - continue; - case "?": - case "*": - case "+": - case "@": - case "!": - this.debug("%s %s %s %j <-- stateChar", pattern, i2, re, c); - if (inClass) { - this.debug(" in class"); - if (c === "!" 
&& i2 === classStart + 1) - c = "^"; - re += c; - continue; - } - self2.debug("call clearStateChar %j", stateChar); - clearStateChar(); - stateChar = c; - if (options.noext) - clearStateChar(); - continue; - case "(": - if (inClass) { - re += "("; - continue; - } - if (!stateChar) { - re += "\\("; - continue; - } - patternListStack.push({ - type: stateChar, - start: i2 - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }); - re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; - this.debug("plType %j %j", stateChar, re); - stateChar = false; - continue; - case ")": - if (inClass || !patternListStack.length) { - re += "\\)"; - continue; - } - clearStateChar(); - hasMagic = true; - var pl = patternListStack.pop(); - re += pl.close; - if (pl.type === "!") { - negativeLists.push(pl); - } - pl.reEnd = re.length; - continue; - case "|": - if (inClass || !patternListStack.length || escaping) { - re += "\\|"; - escaping = false; - continue; - } - clearStateChar(); - re += "|"; - continue; - case "[": - clearStateChar(); - if (inClass) { - re += "\\" + c; - continue; + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + function debug5(...args) { + if (!debug5.enabled) { + return; + } + const self2 = debug5; + const curr = Number(/* @__PURE__ */ new Date()); + const ms = curr - (prevTime || curr); + self2.diff = ms; + self2.prev = prevTime; + self2.curr = curr; + prevTime = curr; + args[0] = createDebug.coerce(args[0]); + if (typeof args[0] !== "string") { + args.unshift("%O"); + } + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match2, format) => { + if (match2 === "%%") { + return "%"; } - inClass = true; - classStart = i2; - reClassStart = re.length; - re += c; - continue; - case "]": - if (i2 === classStart + 1 || !inClass) { - re += "\\" + c; - escaping = false; - continue; + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === "function") { + const val = args[index]; + match2 = formatter.call(self2, val); + args.splice(index, 1); + index--; } - var cs = pattern.substring(classStart + 1, i2); - try { - RegExp("[" + cs + "]"); - } catch (er) { - var sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; - hasMagic = hasMagic || sp[1]; - inClass = false; - continue; + return match2; + }); + createDebug.formatArgs.call(self2, args); + const logFn = self2.log || createDebug.log; + logFn.apply(self2, args); + } + debug5.namespace = namespace; + debug5.useColors = createDebug.useColors(); + debug5.color = createDebug.selectColor(namespace); + debug5.extend = extend; + debug5.destroy = createDebug.destroy; + Object.defineProperty(debug5, "enabled", { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; } - hasMagic = true; - inClass = false; - re += c; - continue; - default: - clearStateChar(); - if (escaping) { - escaping = false; - } else if (reSpecials2[c] && !(c === "^" && inClass)) { - re += "\\"; + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); } - re += c; + return enabledCache; + }, + set: (v) => { + enableOverride = v; + } + }); + if (typeof createDebug.init === "function") { + createDebug.init(debug5); } + return debug5; } - 
if (inClass) { - cs = pattern.substr(classStart + 1); - sp = this.parse(cs, SUBPARSE); - re = re.substr(0, reClassStart) + "\\[" + sp[0]; - hasMagic = hasMagic || sp[1]; + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === "undefined" ? ":" : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; } - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length); - this.debug("setting tail", re, pl); - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { - if (!$2) { - $2 = "\\"; + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + createDebug.names = []; + createDebug.skips = []; + let i2; + const split = (typeof namespaces === "string" ? namespaces : "").split(/[\s,]+/); + const len = split.length; + for (i2 = 0; i2 < len; i2++) { + if (!split[i2]) { + continue; } - return $1 + $1 + $2 + "|"; - }); - this.debug("tail=%j\n %s", tail, tail, pl, re); - var t2 = pl.type === "*" ? star3 : pl.type === "?" ? qmark3 : "\\" + pl.type; - hasMagic = true; - re = re.slice(0, pl.reStart) + t2 + "\\(" + tail; - } - clearStateChar(); - if (escaping) { - re += "\\\\"; + namespaces = split[i2].replace(/\*/g, ".*?"); + if (namespaces[0] === "-") { + createDebug.skips.push(new RegExp("^" + namespaces.slice(1) + "$")); + } else { + createDebug.names.push(new RegExp("^" + namespaces + "$")); + } + } } - var addPatternStart2 = false; - switch (re.charAt(0)) { - case "[": - case ".": - case "(": - addPatternStart2 = true; + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map((namespace) => "-" + namespace) + ].join(","); + createDebug.enable(""); + return namespaces; } - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n]; - var nlBefore = re.slice(0, nl.reStart); - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); - var nlAfter = re.slice(nl.reEnd); - nlLast += nlAfter; - var openParensBefore = nlBefore.split("(").length - 1; - var cleanAfter = nlAfter; - for (i2 = 0; i2 < openParensBefore; i2++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); + function enabled(name) { + if (name[name.length - 1] === "*") { + return true; } - nlAfter = cleanAfter; - var dollar = ""; - if (nlAfter === "" && isSub !== SUBPARSE) { - dollar = "$"; + let i2; + let len; + for (i2 = 0, len = createDebug.skips.length; i2 < len; i2++) { + if (createDebug.skips[i2].test(name)) { + return false; + } } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; - re = newRe; + for (i2 = 0, len = createDebug.names.length; i2 < len; i2++) { + if (createDebug.names[i2].test(name)) { + return true; + } + } + return false; } - if (re !== "" && hasMagic) { - re = "(?=.)" + re; + function toNamespace(regexp) { + return regexp.toString().substring(2, regexp.toString().length - 2).replace(/\.\*\?$/, "*"); } - if (addPatternStart2) { - re = patternStart + re; + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; } - if (isSub === SUBPARSE) { - return [re, hasMagic]; + function destroy() { + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`."); } - if (!hasMagic) { - return globUnescape(pattern); + createDebug.enable(createDebug.load()); + return createDebug; + } + module2.exports = setup; + } +}); + +// node_modules/simple-git/node_modules/debug/src/browser.js +var require_browser2 = __commonJS({ + "node_modules/simple-git/node_modules/debug/src/browser.js"(exports2, module2) { + "use strict"; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.storage = localstorage(); + exports2.destroy = /* @__PURE__ */ (() => { + let warned = false; + return () => { + if (!warned) { + warned = true; + console.warn("Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`."); + } + }; + })(); + exports2.colors = [ + "#0000CC", + "#0000FF", + "#0033CC", + "#0033FF", + "#0066CC", + "#0066FF", + "#0099CC", + "#0099FF", + "#00CC00", + "#00CC33", + "#00CC66", + "#00CC99", + "#00CCCC", + "#00CCFF", + "#3300CC", + "#3300FF", + "#3333CC", + "#3333FF", + "#3366CC", + "#3366FF", + "#3399CC", + "#3399FF", + "#33CC00", + "#33CC33", + "#33CC66", + "#33CC99", + "#33CCCC", + "#33CCFF", + "#6600CC", + "#6600FF", + "#6633CC", + "#6633FF", + "#66CC00", + "#66CC33", + "#9900CC", + "#9900FF", + "#9933CC", + "#9933FF", + "#99CC00", + "#99CC33", + "#CC0000", + "#CC0033", + "#CC0066", + "#CC0099", + "#CC00CC", + "#CC00FF", + "#CC3300", + "#CC3333", + "#CC3366", + "#CC3399", + "#CC33CC", + "#CC33FF", + "#CC6600", + "#CC6633", + "#CC9900", + "#CC9933", + "#CCCC00", + "#CCCC33", + "#FF0000", + "#FF0033", + "#FF0066", + "#FF0099", + "#FF00CC", + "#FF00FF", + "#FF3300", + "#FF3333", + "#FF3366", + "#FF3399", + "#FF33CC", + "#FF33FF", + "#FF6600", + "#FF6633", + "#FF9900", + "#FF9933", + "#FFCC00", + "#FFCC33" + ]; + function useColors() { + if (typeof window !== "undefined" && window.process && (window.process.type === "renderer" || window.process.__nwjs)) { + return true; } - var flags = options.nocase ? "i" : ""; - try { - var regExp = new RegExp("^" + re + "$", flags); - } catch (er) { - return new RegExp("$."); + if (typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; } - regExp._glob = pattern; - regExp._src = re; - return regExp; + let m2; + return typeof document !== "undefined" && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773 + typeof window !== "undefined" && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31? 
+ // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + typeof navigator !== "undefined" && navigator.userAgent && (m2 = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m2[1], 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker + typeof navigator !== "undefined" && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/); } - minimatch2.makeRe = function(pattern, options) { - return new Minimatch2(pattern, options || {}).makeRe(); - }; - Minimatch2.prototype.makeRe = makeRe2; - function makeRe2() { - if (this.regexp || this.regexp === false) - return this.regexp; - var set2 = this.set; - if (!set2.length) { - this.regexp = false; - return this.regexp; + function formatArgs(args) { + args[0] = (this.useColors ? "%c" : "") + this.namespace + (this.useColors ? " %c" : " ") + args[0] + (this.useColors ? "%c " : " ") + "+" + module2.exports.humanize(this.diff); + if (!this.useColors) { + return; } - var options = this.options; - var twoStar = options.noglobstar ? star3 : options.dot ? twoStarDot2 : twoStarNoDot2; - var flags = options.nocase ? "i" : ""; - var re = set2.map(function(pattern) { - return pattern.map(function(p) { - return p === GLOBSTAR2 ? twoStar : typeof p === "string" ? regExpEscape3(p) : p._src; - }).join("\\/"); - }).join("|"); - re = "^(?:" + re + ")$"; - if (this.negate) - re = "^(?!" + re + ").*$"; + const c = "color: " + this.color; + args.splice(1, 0, c, "color: inherit"); + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, (match2) => { + if (match2 === "%%") { + return; + } + index++; + if (match2 === "%c") { + lastC = index; + } + }); + args.splice(lastC, 0, c); + } + exports2.log = console.debug || console.log || (() => { + }); + function save(namespaces) { try { - this.regexp = new RegExp(re, flags); - } catch (ex) { - this.regexp = false; + if (namespaces) { + exports2.storage.setItem("debug", namespaces); + } else { + exports2.storage.removeItem("debug"); + } + } catch (error) { } - return this.regexp; } - minimatch2.match = function(list, pattern, options) { - options = options || {}; - var mm = new Minimatch2(pattern, options); - list = list.filter(function(f3) { - return mm.match(f3); - }); - if (mm.options.nonull && !list.length) { - list.push(pattern); + function load() { + let r2; + try { + r2 = exports2.storage.getItem("debug"); + } catch (error) { } - return list; - }; - Minimatch2.prototype.match = function match2(f3, partial) { - if (typeof partial === "undefined") - partial = this.partial; - this.debug("match", f3, this.pattern); - if (this.comment) - return false; - if (this.empty) - return f3 === ""; - if (f3 === "/" && partial) - return true; - var options = this.options; - if (path2.sep !== "/") { - f3 = f3.split(path2.sep).join("/"); + if (!r2 && typeof process !== "undefined" && "env" in process) { + r2 = process.env.DEBUG; } - f3 = f3.split(slashSplit); - this.debug(this.pattern, "split", f3); - var set2 = this.set; - this.debug(this.pattern, "set", set2); - var filename; - var i2; - for (i2 = f3.length - 1; i2 >= 0; i2--) { - filename = f3[i2]; - if (filename) - break; + return r2; + } + function localstorage() { + try { + return localStorage; + } catch (error) { } - for (i2 = 0; i2 < set2.length; i2++) { - var pattern = set2[i2]; - var file = f3; - if (options.matchBase && pattern.length === 1) { - file = [filename]; - } - var hit = this.matchOne(file, pattern, partial); - if (hit) { - if (options.flipNegate) - return true; 
- return !this.negate; - } + } + module2.exports = require_common2()(exports2); + var { formatters } = module2.exports; + formatters.j = function(v) { + try { + return JSON.stringify(v); + } catch (error) { + return "[UnexpectedJSONParseError]: " + error.message; } - if (options.flipNegate) - return false; - return this.negate; }; - Minimatch2.prototype.matchOne = function(file, pattern, partial) { - var options = this.options; - this.debug( - "matchOne", - { "this": this, file, pattern } - ); - this.debug("matchOne", file.length, pattern.length); - for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { - this.debug("matchOne loop"); - var p = pattern[pi]; - var f3 = file[fi]; - this.debug(pattern, p, f3); - if (p === false) - return false; - if (p === GLOBSTAR2) { - this.debug("GLOBSTAR", [pattern, p, f3]); - var fr = fi; - var pr = pi + 1; - if (pr === pl) { - this.debug("** at the end"); - for (; fi < fl; fi++) { - if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") - return false; - } - return true; - } - while (fr < fl) { - var swallowee = file[fr]; - this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug("globstar found match!", fr, fl, swallowee); - return true; - } else { - if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { - this.debug("dot detected!", file, fr, pattern, pr); - break; - } - this.debug("globstar swallow a segment, and continue"); - fr++; - } + } +}); + +// node_modules/simple-git/node_modules/debug/src/node.js +var require_node2 = __commonJS({ + "node_modules/simple-git/node_modules/debug/src/node.js"(exports2, module2) { + "use strict"; + var tty2 = require("tty"); + var util = require("util"); + exports2.init = init; + exports2.log = log; + exports2.formatArgs = formatArgs; + exports2.save = save; + exports2.load = load; + exports2.useColors = useColors; + exports2.destroy = util.deprecate( + () => { + }, + "Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`." + ); + exports2.colors = [6, 2, 3, 4, 5, 1]; + try { + const supportsColor2 = require_supports_color(); + if (supportsColor2 && (supportsColor2.stderr || supportsColor2).level >= 2) { + exports2.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } + } catch (error) { + } + exports2.inspectOpts = Object.keys(process.env).filter((key) => { + return /^debug_/i.test(key); + }).reduce((obj, key) => { + const prop = key.substring(6).toLowerCase().replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === "null") { + val = null; + } else { + val = Number(val); + } + obj[prop] = val; + return obj; + }, {}); + function useColors() { + return "colors" in exports2.inspectOpts ? 
Boolean(exports2.inspectOpts.colors) : tty2.isatty(process.stderr.fd); + } + function formatArgs(args) { + const { namespace: name, useColors: useColors2 } = this; + if (useColors2) { + const c = this.color; + const colorCode = "\x1B[3" + (c < 8 ? c : "8;5;" + c); + const prefix = ` ${colorCode};1m${name} \x1B[0m`; + args[0] = prefix + args[0].split("\n").join("\n" + prefix); + args.push(colorCode + "m+" + module2.exports.humanize(this.diff) + "\x1B[0m"); + } else { + args[0] = getDate() + name + " " + args[0]; + } + } + function getDate() { + if (exports2.inspectOpts.hideDate) { + return ""; + } + return (/* @__PURE__ */ new Date()).toISOString() + " "; + } + function log(...args) { + return process.stderr.write(util.formatWithOptions(exports2.inspectOpts, ...args) + "\n"); + } + function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + delete process.env.DEBUG; + } + } + function load() { + return process.env.DEBUG; + } + function init(debug5) { + debug5.inspectOpts = {}; + const keys = Object.keys(exports2.inspectOpts); + for (let i2 = 0; i2 < keys.length; i2++) { + debug5.inspectOpts[keys[i2]] = exports2.inspectOpts[keys[i2]]; + } + } + module2.exports = require_common2()(exports2); + var { formatters } = module2.exports; + formatters.o = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts).split("\n").map((str) => str.trim()).join(" "); + }; + formatters.O = function(v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); + }; + } +}); + +// node_modules/simple-git/node_modules/debug/src/index.js +var require_src3 = __commonJS({ + "node_modules/simple-git/node_modules/debug/src/index.js"(exports2, module2) { + "use strict"; + if (typeof process === "undefined" || process.type === "renderer" || process.browser === true || process.__nwjs) { + module2.exports = require_browser2(); + } else { + module2.exports = require_node2(); + } + } +}); + +// node_modules/@kwsites/promise-deferred/dist/index.js +var require_dist2 = __commonJS({ + "node_modules/@kwsites/promise-deferred/dist/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.createDeferred = exports2.deferred = void 0; + function deferred2() { + let done; + let fail; + let status = "pending"; + const promise = new Promise((_done, _fail) => { + done = _done; + fail = _fail; + }); + return { + promise, + done(result) { + if (status === "pending") { + status = "resolved"; + done(result); } - if (partial) { - this.debug("\n>>> no match, partial?", file, fr, pattern, pr); - if (fr === fl) - return true; + }, + fail(error) { + if (status === "pending") { + status = "rejected"; + fail(error); } - return false; - } - var hit; - if (typeof p === "string") { - hit = f3 === p; - this.debug("string match", p, f3, hit); - } else { - hit = f3.match(p); - this.debug("pattern match", p, f3, hit); + }, + get fulfilled() { + return status !== "pending"; + }, + get status() { + return status; } - if (!hit) - return false; - } - if (fi === fl && pi === pl) { + }; + } + exports2.deferred = deferred2; + exports2.createDeferred = deferred2; + exports2.default = deferred2; + } +}); + +// node_modules/locate-path/node_modules/path-exists/index.js +var require_path_exists = __commonJS({ + "node_modules/locate-path/node_modules/path-exists/index.js"(exports2, module2) { + "use strict"; + var fs2 = require("fs"); + module2.exports = (fp) => new Promise((resolve) => { + 
fs2.access(fp, (err) => { + resolve(!err); + }); + }); + module2.exports.sync = (fp) => { + try { + fs2.accessSync(fp); return true; - } else if (fi === fl) { - return partial; - } else if (pi === pl) { - return fi === fl - 1 && file[fi] === ""; + } catch (err) { + return false; } - throw new Error("wtf?"); }; - function globUnescape(s2) { - return s2.replace(/\\(.)/g, "$1"); - } - function regExpEscape3(s2) { - return s2.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); - } } }); -// node_modules/inherits/inherits_browser.js -var require_inherits_browser = __commonJS({ - "node_modules/inherits/inherits_browser.js"(exports2, module2) { +// node_modules/p-try/index.js +var require_p_try = __commonJS({ + "node_modules/p-try/index.js"(exports2, module2) { "use strict"; - if (typeof Object.create === "function") { - module2.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor; - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); + module2.exports = (cb) => new Promise((resolve) => { + resolve(cb()); + }); + } +}); + +// node_modules/p-locate/node_modules/p-limit/index.js +var require_p_limit = __commonJS({ + "node_modules/p-locate/node_modules/p-limit/index.js"(exports2, module2) { + "use strict"; + var pTry = require_p_try(); + module2.exports = (concurrency) => { + if (concurrency < 1) { + throw new TypeError("Expected `concurrency` to be a number from 1 and up"); + } + const queue = []; + let activeCount = 0; + const next = () => { + activeCount--; + if (queue.length > 0) { + queue.shift()(); } }; - } else { - module2.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor; - var TempCtor = function() { - }; - TempCtor.prototype = superCtor.prototype; - ctor.prototype = new TempCtor(); - ctor.prototype.constructor = ctor; + return (fn) => new Promise((resolve, reject) => { + const run2 = () => { + activeCount++; + pTry(fn).then( + (val) => { + resolve(val); + next(); + }, + (err) => { + reject(err); + next(); + } + ); + }; + if (activeCount < concurrency) { + run2(); + } else { + queue.push(run2); } - }; - } + }); + }; } }); -// node_modules/inherits/inherits.js -var require_inherits = __commonJS({ - "node_modules/inherits/inherits.js"(exports2, module2) { +// node_modules/p-locate/index.js +var require_p_locate = __commonJS({ + "node_modules/p-locate/index.js"(exports2, module2) { "use strict"; - try { - util = require("util"); - if (typeof util.inherits !== "function") - throw ""; - module2.exports = util.inherits; - } catch (e2) { - module2.exports = require_inherits_browser(); - } - var util; + var pLimit = require_p_limit(); + var EndError = class extends Error { + constructor(value) { + super(); + this.value = value; + } + }; + var finder = (el) => Promise.all(el).then((val) => val[1] === true && Promise.reject(new EndError(val[0]))); + module2.exports = (iterable, tester, opts) => { + opts = Object.assign({ + concurrency: Infinity, + preserveOrder: true + }, opts); + const limit = pLimit(opts.concurrency); + const items = Array.from(iterable).map((el) => [el, limit(() => Promise.resolve(el).then(tester))]); + const checkLimit = pLimit(opts.preserveOrder ? 1 : Infinity); + return Promise.all(items.map((el) => checkLimit(() => finder(el)))).then(() => { + }).catch((err) => err instanceof EndError ? 
err.value : Promise.reject(err)); + }; } }); -// node_modules/path-is-absolute/index.js -var require_path_is_absolute = __commonJS({ - "node_modules/path-is-absolute/index.js"(exports2, module2) { +// node_modules/locate-path/index.js +var require_locate_path = __commonJS({ + "node_modules/locate-path/index.js"(exports2, module2) { "use strict"; - function posix(path2) { - return path2.charAt(0) === "/"; - } - function win32(path2) { - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path2); - var device = result[1] || ""; - var isUnc = Boolean(device && device.charAt(1) !== ":"); - return Boolean(result[2] || isUnc); - } - module2.exports = process.platform === "win32" ? win32 : posix; - module2.exports.posix = posix; - module2.exports.win32 = win32; + var path2 = require("path"); + var pathExists = require_path_exists(); + var pLocate = require_p_locate(); + module2.exports = (iterable, opts) => { + opts = Object.assign({ + cwd: process.cwd() + }, opts); + return pLocate(iterable, (el) => pathExists(path2.resolve(opts.cwd, el)), opts); + }; + module2.exports.sync = (iterable, opts) => { + opts = Object.assign({ + cwd: process.cwd() + }, opts); + for (const el of iterable) { + if (pathExists.sync(path2.resolve(opts.cwd, el))) { + return el; + } + } + }; } }); -// node_modules/glob/common.js -var require_common2 = __commonJS({ - "node_modules/glob/common.js"(exports2) { +// node_modules/find-up/index.js +var require_find_up = __commonJS({ + "node_modules/find-up/index.js"(exports2, module2) { "use strict"; - exports2.setopts = setopts; - exports2.ownProp = ownProp; - exports2.makeAbs = makeAbs; - exports2.finish = finish; - exports2.mark = mark; - exports2.isIgnored = isIgnored; - exports2.childrenIgnored = childrenIgnored; - function ownProp(obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field); - } - var fs2 = require("fs"); var path2 = require("path"); - var minimatch2 = require_minimatch(); - var isAbsolute = require_path_is_absolute(); - var Minimatch2 = minimatch2.Minimatch; - function alphasort(a, b) { - return a.localeCompare(b, "en"); - } - function setupIgnores(self2, options) { - self2.ignore = options.ignore || []; - if (!Array.isArray(self2.ignore)) - self2.ignore = [self2.ignore]; - if (self2.ignore.length) { - self2.ignore = self2.ignore.map(ignoreMap); + var locatePath = require_locate_path(); + module2.exports = (filename, opts) => { + opts = opts || {}; + const startDir = path2.resolve(opts.cwd || ""); + const root = path2.parse(startDir).root; + const filenames = [].concat(filename); + return new Promise((resolve) => { + (function find(dir) { + locatePath(filenames, { cwd: dir }).then((file) => { + if (file) { + resolve(path2.join(dir, file)); + } else if (dir === root) { + resolve(null); + } else { + find(path2.dirname(dir)); + } + }); + })(startDir); + }); + }; + module2.exports.sync = (filename, opts) => { + opts = opts || {}; + let dir = path2.resolve(opts.cwd || ""); + const root = path2.parse(dir).root; + const filenames = [].concat(filename); + while (true) { + const file = locatePath.sync(filenames, { cwd: dir }); + if (file) { + return path2.join(dir, file); + } else if (dir === root) { + return null; + } + dir = path2.dirname(dir); } - } - function ignoreMap(pattern) { - var gmatcher = null; - if (pattern.slice(-3) === "/**") { - var gpattern = pattern.replace(/(\/\*\*)+$/, ""); - gmatcher = new Minimatch2(gpattern, { dot: true }); + }; + } +}); + +// 
node_modules/codeowners/node_modules/ignore/ignore.js +var require_ignore = __commonJS({ + "node_modules/codeowners/node_modules/ignore/ignore.js"(exports2, module2) { + "use strict"; + var _createClass = /* @__PURE__ */ function() { + function defineProperties(target, props) { + for (var i2 = 0; i2 < props.length; i2++) { + var descriptor = props[i2]; + descriptor.enumerable = descriptor.enumerable || false; + descriptor.configurable = true; + if ("value" in descriptor) descriptor.writable = true; + Object.defineProperty(target, descriptor.key, descriptor); + } } - return { - matcher: new Minimatch2(pattern, { dot: true }), - gmatcher + return function(Constructor, protoProps, staticProps) { + if (protoProps) defineProperties(Constructor.prototype, protoProps); + if (staticProps) defineProperties(Constructor, staticProps); + return Constructor; }; + }(); + function _classCallCheck(instance, Constructor) { + if (!(instance instanceof Constructor)) { + throw new TypeError("Cannot call a class as a function"); + } } - function setopts(self2, pattern, options) { - if (!options) - options = {}; - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar"); - } - pattern = "**/" + pattern; - } - self2.silent = !!options.silent; - self2.pattern = pattern; - self2.strict = options.strict !== false; - self2.realpath = !!options.realpath; - self2.realpathCache = options.realpathCache || /* @__PURE__ */ Object.create(null); - self2.follow = !!options.follow; - self2.dot = !!options.dot; - self2.mark = !!options.mark; - self2.nodir = !!options.nodir; - if (self2.nodir) - self2.mark = true; - self2.sync = !!options.sync; - self2.nounique = !!options.nounique; - self2.nonull = !!options.nonull; - self2.nosort = !!options.nosort; - self2.nocase = !!options.nocase; - self2.stat = !!options.stat; - self2.noprocess = !!options.noprocess; - self2.absolute = !!options.absolute; - self2.fs = options.fs || fs2; - self2.maxLength = options.maxLength || Infinity; - self2.cache = options.cache || /* @__PURE__ */ Object.create(null); - self2.statCache = options.statCache || /* @__PURE__ */ Object.create(null); - self2.symlinks = options.symlinks || /* @__PURE__ */ Object.create(null); - setupIgnores(self2, options); - self2.changedCwd = false; - var cwd = process.cwd(); - if (!ownProp(options, "cwd")) - self2.cwd = cwd; - else { - self2.cwd = path2.resolve(options.cwd); - self2.changedCwd = self2.cwd !== cwd; - } - self2.root = options.root || path2.resolve(self2.cwd, "/"); - self2.root = path2.resolve(self2.root); - if (process.platform === "win32") - self2.root = self2.root.replace(/\\/g, "/"); - self2.cwdAbs = isAbsolute(self2.cwd) ? self2.cwd : makeAbs(self2, self2.cwd); - if (process.platform === "win32") - self2.cwdAbs = self2.cwdAbs.replace(/\\/g, "/"); - self2.nomount = !!options.nomount; - options.nonegate = true; - options.nocomment = true; - options.allowWindowsEscape = false; - self2.minimatch = new Minimatch2(pattern, options); - self2.options = self2.minimatch.options; + module2.exports = function() { + return new IgnoreBase(); + }; + function make_array(subject) { + return Array.isArray(subject) ? subject : [subject]; } - function finish(self2) { - var nou = self2.nounique; - var all = nou ? 
[] : /* @__PURE__ */ Object.create(null); - for (var i2 = 0, l = self2.matches.length; i2 < l; i2++) { - var matches = self2.matches[i2]; - if (!matches || Object.keys(matches).length === 0) { - if (self2.nonull) { - var literal = self2.minimatch.globSet[i2]; - if (nou) - all.push(literal); - else - all[literal] = true; + var REGEX_BLANK_LINE = /^\s+$/; + var REGEX_LEADING_EXCAPED_EXCLAMATION = /^\\\!/; + var REGEX_LEADING_EXCAPED_HASH = /^\\#/; + var SLASH = "/"; + var KEY_IGNORE = typeof Symbol !== "undefined" ? Symbol.for("node-ignore") : "node-ignore"; + var IgnoreBase = function() { + function IgnoreBase2() { + _classCallCheck(this, IgnoreBase2); + this._rules = []; + this[KEY_IGNORE] = true; + this._initCache(); + } + _createClass(IgnoreBase2, [{ + key: "_initCache", + value: function _initCache() { + this._cache = {}; + } + // @param {Array.|string|Ignore} pattern + }, { + key: "add", + value: function add(pattern) { + this._added = false; + if (typeof pattern === "string") { + pattern = pattern.split(/\r?\n/g); } - } else { - var m2 = Object.keys(matches); - if (nou) - all.push.apply(all, m2); - else - m2.forEach(function(m3) { - all[m3] = true; - }); + make_array(pattern).forEach(this._addPattern, this); + if (this._added) { + this._initCache(); + } + return this; } - } - if (!nou) - all = Object.keys(all); - if (!self2.nosort) - all = all.sort(alphasort); - if (self2.mark) { - for (var i2 = 0; i2 < all.length; i2++) { - all[i2] = self2._mark(all[i2]); + // legacy + }, { + key: "addPattern", + value: function addPattern(pattern) { + return this.add(pattern); } - if (self2.nodir) { - all = all.filter(function(e2) { - var notDir = !/\/$/.test(e2); - var c = self2.cache[e2] || self2.cache[makeAbs(self2, e2)]; - if (notDir && c) - notDir = c !== "DIR" && !Array.isArray(c); - return notDir; + }, { + key: "_addPattern", + value: function _addPattern(pattern) { + if (pattern && pattern[KEY_IGNORE]) { + this._rules = this._rules.concat(pattern._rules); + this._added = true; + return; + } + if (this._checkPattern(pattern)) { + var rule = this._createRule(pattern); + this._added = true; + this._rules.push(rule); + } + } + }, { + key: "_checkPattern", + value: function _checkPattern(pattern) { + return pattern && typeof pattern === "string" && !REGEX_BLANK_LINE.test(pattern) && pattern.indexOf("#") !== 0; + } + }, { + key: "filter", + value: function filter3(paths) { + var _this = this; + return make_array(paths).filter(function(path2) { + return _this._filter(path2); }); } - } - if (self2.ignore.length) - all = all.filter(function(m3) { - return !isIgnored(self2, m3); - }); - self2.found = all; - } - function mark(self2, p) { - var abs = makeAbs(self2, p); - var c = self2.cache[abs]; - var m2 = p; - if (c) { - var isDir = c === "DIR" || Array.isArray(c); - var slash = p.slice(-1) === "/"; - if (isDir && !slash) - m2 += "/"; - else if (!isDir && slash) - m2 = m2.slice(0, -1); - if (m2 !== p) { - var mabs = makeAbs(self2, m2); - self2.statCache[mabs] = self2.statCache[abs]; - self2.cache[mabs] = self2.cache[abs]; + }, { + key: "createFilter", + value: function createFilter() { + var _this2 = this; + return function(path2) { + return _this2._filter(path2); + }; } + }, { + key: "ignores", + value: function ignores(path2) { + return !this._filter(path2); + } + }, { + key: "_createRule", + value: function _createRule(pattern) { + var origin = pattern; + var negative = false; + if (pattern.indexOf("!") === 0) { + negative = true; + pattern = pattern.substr(1); + } + pattern = 
pattern.replace(REGEX_LEADING_EXCAPED_EXCLAMATION, "!").replace(REGEX_LEADING_EXCAPED_HASH, "#"); + var regex = make_regex(pattern, negative); + return { + origin, + pattern, + negative, + regex + }; + } + // @returns `Boolean` true if the `path` is NOT ignored + }, { + key: "_filter", + value: function _filter(path2, slices) { + if (!path2) { + return false; + } + if (path2 in this._cache) { + return this._cache[path2]; + } + if (!slices) { + slices = path2.split(SLASH); + } + slices.pop(); + return this._cache[path2] = slices.length ? this._filter(slices.join(SLASH) + SLASH, slices) && this._test(path2) : this._test(path2); + } + // @returns {Boolean} true if a file is NOT ignored + }, { + key: "_test", + value: function _test(path2) { + var matched = 0; + this._rules.forEach(function(rule) { + if (!(matched ^ rule.negative)) { + matched = rule.negative ^ rule.regex.test(path2); + } + }); + return !matched; + } + }]); + return IgnoreBase2; + }(); + var DEFAULT_REPLACER_PREFIX = [ + // > Trailing spaces are ignored unless they are quoted with backslash ("\") + [ + // (a\ ) -> (a ) + // (a ) -> (a) + // (a \ ) -> (a ) + /\\?\s+$/, + function(match2) { + return match2.indexOf("\\") === 0 ? " " : ""; + } + ], + // replace (\ ) with ' ' + [/\\\s/g, function() { + return " "; + }], + // Escape metacharacters + // which is written down by users but means special for regular expressions. + // > There are 12 characters with special meanings: + // > - the backslash \, + // > - the caret ^, + // > - the dollar sign $, + // > - the period or dot ., + // > - the vertical bar or pipe symbol |, + // > - the question mark ?, + // > - the asterisk or star *, + // > - the plus sign +, + // > - the opening parenthesis (, + // > - the closing parenthesis ), + // > - and the opening square bracket [, + // > - the opening curly brace {, + // > These special characters are often called "metacharacters". + [/[\\\^$.|?*+()\[{]/g, function(match2) { + return "\\" + match2; + }], + // leading slash + [ + // > A leading slash matches the beginning of the pathname. + // > For example, "/*.c" matches "cat-file.c" but not "mozilla-sha1/sha1.c". + // A leading slash matches the beginning of the pathname + /^\//, + function() { + return "^"; + } + ], + // replace special metacharacter slash after the leading slash + [/\//g, function() { + return "\\/"; + }], + [ + // > A leading "**" followed by a slash means match in all directories. + // > For example, "**/foo" matches file or directory "foo" anywhere, + // > the same as pattern "foo". + // > "**/foo/bar" matches file or directory "bar" anywhere that is directly under directory "foo". + // Notice that the '*'s have been replaced as '\\*' + /^\^*\\\*\\\*\\\//, + // '**/foo' <-> 'foo' + function() { + return "^(?:.*\\/)?"; + } + ] + ]; + var DEFAULT_REPLACER_SUFFIX = [ + // starting + [ + // there will be no leading '/' (which has been replaced by section "leading slash") + // If starts with '**', adding a '^' to the regular expression also works + /^(?=[^\^])/, + function() { + return !/\/(?!$)/.test(this) ? "(?:^|\\/)" : "^"; + } + ], + // two globstars + [ + // Use lookahead assertions so that we could match more than one `'/**'` + /\\\/\\\*\\\*(?=\\\/|$)/g, + // Zero, one or several directories + // should not use '*', or it will be replaced by the next replacer + // Check if it is not the last `'/**'` + function(match2, index, str) { + return index + 6 < str.length ? 
"(?:\\/[^\\/]+)*" : "\\/.+"; + } + ], + // intermediate wildcards + [ + // Never replace escaped '*' + // ignore rule '\*' will match the path '*' + // 'abc.*/' -> go + // 'abc.*' -> skip this rule + /(^|[^\\]+)\\\*(?=.+)/g, + // '*.js' matches '.js' + // '*.js' doesn't match 'abc' + function(match2, p1) { + return p1 + "[^\\/]*"; + } + ], + // trailing wildcard + [/(\^|\\\/)?\\\*$/, function(match2, p1) { + return (p1 ? p1 + "[^/]+" : "[^/]*") + "(?=$|\\/$)"; + }], + [ + // unescape + /\\\\\\/g, + function() { + return "\\"; + } + ] + ]; + var POSITIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [ + // 'f' + // matches + // - /f(end) + // - /f/ + // - (start)f(end) + // - (start)f/ + // doesn't match + // - oof + // - foo + // pseudo: + // -> (^|/)f(/|$) + // ending + [ + // 'js' will not match 'js.' + // 'ab' will not match 'abc' + /(?:[^*\/])$/, + // 'js*' will not match 'a.js' + // 'js/' will not match 'a.js' + // 'js' will match 'a.js' and 'a.js/' + function(match2) { + return match2 + "(?=$|\\/)"; + } + ] + ], DEFAULT_REPLACER_SUFFIX); + var NEGATIVE_REPLACERS = [].concat(DEFAULT_REPLACER_PREFIX, [ + // #24, #38 + // The MISSING rule of [gitignore docs](https://git-scm.com/docs/gitignore) + // A negative pattern without a trailing wildcard should not + // re-include the things inside that directory. + // eg: + // ['node_modules/*', '!node_modules'] + // should ignore `node_modules/a.js` + [/(?:[^*])$/, function(match2) { + return match2 + "(?=$|\\/$)"; + }] + ], DEFAULT_REPLACER_SUFFIX); + var cache2 = {}; + function make_regex(pattern, negative) { + var r2 = cache2[pattern]; + if (r2) { + return r2; } - return m2; - } - function makeAbs(self2, f3) { - var abs = f3; - if (f3.charAt(0) === "/") { - abs = path2.join(self2.root, f3); - } else if (isAbsolute(f3) || f3 === "") { - abs = f3; - } else if (self2.changedCwd) { - abs = path2.resolve(self2.cwd, f3); - } else { - abs = path2.resolve(f3); - } - if (process.platform === "win32") - abs = abs.replace(/\\/g, "/"); - return abs; - } - function isIgnored(self2, path3) { - if (!self2.ignore.length) - return false; - return self2.ignore.some(function(item) { - return item.matcher.match(path3) || !!(item.gmatcher && item.gmatcher.match(path3)); - }); + var replacers = negative ? NEGATIVE_REPLACERS : POSITIVE_REPLACERS; + var source = replacers.reduce(function(prev, current) { + return prev.replace(current[0], current[1].bind(pattern)); + }, pattern); + return cache2[pattern] = new RegExp(source, "i"); } - function childrenIgnored(self2, path3) { - if (!self2.ignore.length) - return false; - return self2.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path3)); - }); + if ( + // Detect `process` so that it can run in browsers. + typeof process !== "undefined" && (process.env && process.env.IGNORE_TEST_WIN32 || process.platform === "win32") + ) { + filter2 = IgnoreBase.prototype._filter; + make_posix = function make_posix2(str) { + return /^\\\\\?\\/.test(str) || /[^\x00-\x80]+/.test(str) ? 
str : str.replace(/\\/g, "/"); + }; + IgnoreBase.prototype._filter = function(path2, slices) { + path2 = make_posix(path2); + return filter2.call(this, path2, slices); + }; } + var filter2; + var make_posix; } }); -// node_modules/glob/sync.js -var require_sync = __commonJS({ - "node_modules/glob/sync.js"(exports2, module2) { +// node_modules/is-directory/index.js +var require_is_directory = __commonJS({ + "node_modules/is-directory/index.js"(exports2, module2) { "use strict"; - module2.exports = globSync; - globSync.GlobSync = GlobSync; - var rp = require_fs(); - var minimatch2 = require_minimatch(); - var Minimatch2 = minimatch2.Minimatch; - var Glob = require_glob().Glob; - var util = require("util"); - var path2 = require("path"); - var assert = require("assert"); - var isAbsolute = require_path_is_absolute(); - var common = require_common2(); - var setopts = common.setopts; - var ownProp = common.ownProp; - var childrenIgnored = common.childrenIgnored; - var isIgnored = common.isIgnored; - function globSync(pattern, options) { - if (typeof options === "function" || arguments.length === 3) - throw new TypeError("callback provided to sync glob\nSee: https://github.com/isaacs/node-glob/issues/167"); - return new GlobSync(pattern, options).found; - } - function GlobSync(pattern, options) { - if (!pattern) - throw new Error("must provide pattern"); - if (typeof options === "function" || arguments.length === 3) - throw new TypeError("callback provided to sync glob\nSee: https://github.com/isaacs/node-glob/issues/167"); - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options); - setopts(this, pattern, options); - if (this.noprocess) - return this; - var n = this.minimatch.set.length; - this.matches = new Array(n); - for (var i2 = 0; i2 < n; i2++) { - this._process(this.minimatch.set[i2], i2, false); + var fs2 = require("fs"); + function isDirectory(filepath, cb) { + if (typeof cb !== "function") { + throw new Error("expected a callback function"); } - this._finish(); - } - GlobSync.prototype._finish = function() { - assert.ok(this instanceof GlobSync); - if (this.realpath) { - var self2 = this; - this.matches.forEach(function(matchset, index) { - var set2 = self2.matches[index] = /* @__PURE__ */ Object.create(null); - for (var p in matchset) { - try { - p = self2._makeAbs(p); - var real = rp.realpathSync(p, self2.realpathCache); - set2[real] = true; - } catch (er) { - if (er.syscall === "stat") - set2[self2._makeAbs(p)] = true; - else - throw er; - } + if (typeof filepath !== "string") { + cb(new Error("expected filepath to be a string")); + return; + } + fs2.stat(filepath, function(err, stats) { + if (err) { + if (err.code === "ENOENT") { + cb(null, false); + return; } - }); + cb(err); + return; + } + cb(null, stats.isDirectory()); + }); + } + isDirectory.sync = function isDirectorySync(filepath) { + if (typeof filepath !== "string") { + throw new Error("expected filepath to be a string"); } - common.finish(this); - }; - GlobSync.prototype._process = function(pattern, index, inGlobStar) { - assert.ok(this instanceof GlobSync); - var n = 0; - while (typeof pattern[n] === "string") { - n++; - } - var prefix; - switch (n) { - case pattern.length: - this._processSimple(pattern.join("/"), index); - return; - case 0: - prefix = null; - break; - default: - prefix = pattern.slice(0, n).join("/"); - break; + try { + var stat2 = fs2.statSync(filepath); + return stat2.isDirectory(); + } catch (err) { + if (err.code === "ENOENT") { + return false; + } else { + throw err; + } } - var 
remain = pattern.slice(n); - var read; - if (prefix === null) - read = "."; - else if (isAbsolute(prefix) || isAbsolute(pattern.map(function(p) { - return typeof p === "string" ? p : "[*]"; - }).join("/"))) { - if (!prefix || !isAbsolute(prefix)) - prefix = "/" + prefix; - read = prefix; - } else - read = prefix; - var abs = this._makeAbs(read); - if (childrenIgnored(this, read)) - return; - var isGlobStar = remain[0] === minimatch2.GLOBSTAR; - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar); - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar); + return false; }; - GlobSync.prototype._processReaddir = function(prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar); - if (!entries) - return; - var pn = remain[0]; - var negate = !!this.minimatch.negate; - var rawGlob = pn._glob; - var dotOk = this.dot || rawGlob.charAt(0) === "."; - var matchedEntries = []; - for (var i2 = 0; i2 < entries.length; i2++) { - var e2 = entries[i2]; - if (e2.charAt(0) !== "." || dotOk) { - var m2; - if (negate && !prefix) { - m2 = !e2.match(pn); - } else { - m2 = e2.match(pn); - } - if (m2) - matchedEntries.push(e2); + module2.exports = isDirectory; + } +}); + +// node_modules/fs.realpath/old.js +var require_old = __commonJS({ + "node_modules/fs.realpath/old.js"(exports2) { + "use strict"; + var pathModule = require("path"); + var isWindows = process.platform === "win32"; + var fs2 = require("fs"); + var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + function rethrow() { + var callback; + if (DEBUG) { + var backtrace = new Error(); + callback = debugCallback; + } else + callback = missingCallback; + return callback; + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); } } - var len = matchedEntries.length; - if (len === 0) - return; - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = /* @__PURE__ */ Object.create(null); - for (var i2 = 0; i2 < len; i2++) { - var e2 = matchedEntries[i2]; - if (prefix) { - if (prefix.slice(-1) !== "/") - e2 = prefix + "/" + e2; + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; + else if (!process.noDeprecation) { + var msg = "fs: missing callback " + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); else - e2 = prefix + e2; - } - if (e2.charAt(0) === "/" && !this.nomount) { - e2 = path2.join(this.root, e2); + console.error(msg); } - this._emitMatch(index, e2); } - return; - } - remain.shift(); - for (var i2 = 0; i2 < len; i2++) { - var e2 = matchedEntries[i2]; - var newPattern; - if (prefix) - newPattern = [prefix, e2]; - else - newPattern = [e2]; - this._process(newPattern.concat(remain), index, inGlobStar); - } - }; - GlobSync.prototype._emitMatch = function(index, e2) { - if (isIgnored(this, e2)) - return; - var abs = this._makeAbs(e2); - if (this.mark) - e2 = this._mark(e2); - if (this.absolute) { - e2 = abs; } - if (this.matches[index][e2]) - return; - if (this.nodir) { - var c = this.cache[abs]; - if (c === "DIR" || Array.isArray(c)) - return; + } + function maybeCallback(cb) { + return typeof cb === "function" ? 
cb : rethrow(); + } + var normalize2 = pathModule.normalize; + if (isWindows) { + nextPartRe = /(.*?)(?:[\/\\]+|$)/g; + } else { + nextPartRe = /(.*?)(?:[\/]+|$)/g; + } + var nextPartRe; + if (isWindows) { + splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; + } else { + splitRootRe = /^[\/]*/; + } + var splitRootRe; + exports2.realpathSync = function realpathSync(p, cache2) { + p = pathModule.resolve(p); + if (cache2 && Object.prototype.hasOwnProperty.call(cache2, p)) { + return cache2[p]; } - this.matches[index][e2] = true; - if (this.stat) - this._stat(e2); - }; - GlobSync.prototype._readdirInGlobStar = function(abs) { - if (this.follow) - return this._readdir(abs, false); - var entries; - var lstat; - var stat2; - try { - lstat = this.fs.lstatSync(abs); - } catch (er) { - if (er.code === "ENOENT") { - return null; + var original = p, seenLinks = {}, knownHard = {}; + var pos; + var current; + var base; + var previous; + start(); + function start() { + var m2 = splitRootRe.exec(p); + pos = m2[0].length; + current = m2[0]; + base = m2[0]; + previous = ""; + if (isWindows && !knownHard[base]) { + fs2.lstatSync(base); + knownHard[base] = true; } } - var isSym = lstat && lstat.isSymbolicLink(); - this.symlinks[abs] = isSym; - if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = "FILE"; - else - entries = this._readdir(abs, false); - return entries; - }; - GlobSync.prototype._readdir = function(abs, inGlobStar) { - var entries; - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs); - if (ownProp(this.cache, abs)) { - var c = this.cache[abs]; - if (!c || c === "FILE") - return null; - if (Array.isArray(c)) - return c; - } - try { - return this._readdirEntries(abs, this.fs.readdirSync(abs)); - } catch (er) { - this._readdirError(abs, er); - return null; - } - }; - GlobSync.prototype._readdirEntries = function(abs, entries) { - if (!this.mark && !this.stat) { - for (var i2 = 0; i2 < entries.length; i2++) { - var e2 = entries[i2]; - if (abs === "/") - e2 = abs + e2; - else - e2 = abs + "/" + e2; - this.cache[e2] = true; + while (pos < p.length) { + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + if (knownHard[base] || cache2 && cache2[base] === base) { + continue; } - } - this.cache[abs] = entries; - return entries; - }; - GlobSync.prototype._readdirError = function(f3, er) { - switch (er.code) { - case "ENOTSUP": - case "ENOTDIR": - var abs = this._makeAbs(f3); - this.cache[abs] = "FILE"; - if (abs === this.cwdAbs) { - var error = new Error(er.code + " invalid cwd " + this.cwd); - error.path = this.cwd; - error.code = er.code; - throw error; + var resolvedLink; + if (cache2 && Object.prototype.hasOwnProperty.call(cache2, base)) { + resolvedLink = cache2[base]; + } else { + var stat2 = fs2.lstatSync(base); + if (!stat2.isSymbolicLink()) { + knownHard[base] = true; + if (cache2) cache2[base] = base; + continue; } - break; - case "ENOENT": - case "ELOOP": - case "ENAMETOOLONG": - case "UNKNOWN": - this.cache[this._makeAbs(f3)] = false; - break; - default: - this.cache[this._makeAbs(f3)] = false; - if (this.strict) - throw er; - if (!this.silent) - console.error("glob error", er); - break; + var linkTarget = null; + if (!isWindows) { + var id = stat2.dev.toString(32) + ":" + stat2.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + 
fs2.statSync(base); + linkTarget = fs2.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + if (cache2) cache2[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); } + if (cache2) cache2[original] = p; + return p; }; - GlobSync.prototype._processGlobStar = function(prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar); - if (!entries) - return; - var remainWithoutGlobStar = remain.slice(1); - var gspref = prefix ? [prefix] : []; - var noGlobStar = gspref.concat(remainWithoutGlobStar); - this._process(noGlobStar, index, false); - var len = entries.length; - var isSym = this.symlinks[abs]; - if (isSym && inGlobStar) - return; - for (var i2 = 0; i2 < len; i2++) { - var e2 = entries[i2]; - if (e2.charAt(0) === "." && !this.dot) - continue; - var instead = gspref.concat(entries[i2], remainWithoutGlobStar); - this._process(instead, index, true); - var below = gspref.concat(entries[i2], remain); - this._process(below, index, true); + exports2.realpath = function realpath(p, cache2, cb) { + if (typeof cb !== "function") { + cb = maybeCallback(cache2); + cache2 = null; } - }; - GlobSync.prototype._processSimple = function(prefix, index) { - var exists2 = this._stat(prefix); - if (!this.matches[index]) - this.matches[index] = /* @__PURE__ */ Object.create(null); - if (!exists2) - return; - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix); - if (prefix.charAt(0) === "/") { - prefix = path2.join(this.root, prefix); + p = pathModule.resolve(p); + if (cache2 && Object.prototype.hasOwnProperty.call(cache2, p)) { + return process.nextTick(cb.bind(null, null, cache2[p])); + } + var original = p, seenLinks = {}, knownHard = {}; + var pos; + var current; + var base; + var previous; + start(); + function start() { + var m2 = splitRootRe.exec(p); + pos = m2[0].length; + current = m2[0]; + base = m2[0]; + previous = ""; + if (isWindows && !knownHard[base]) { + fs2.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); } else { - prefix = path2.resolve(this.root, prefix); - if (trail) - prefix += "/"; + process.nextTick(LOOP); } } - if (process.platform === "win32") - prefix = prefix.replace(/\\/g, "/"); - this._emitMatch(index, prefix); - }; - GlobSync.prototype._stat = function(f3) { - var abs = this._makeAbs(f3); - var needDir = f3.slice(-1) === "/"; - if (f3.length > this.maxLength) - return false; - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs]; - if (Array.isArray(c)) - c = "DIR"; - if (!needDir || c === "DIR") - return c; - if (needDir && c === "FILE") - return false; + function LOOP() { + if (pos >= p.length) { + if (cache2) cache2[original] = p; + return cb(null, p); + } + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + if (knownHard[base] || cache2 && cache2[base] === base) { + return process.nextTick(LOOP); + } + if (cache2 && Object.prototype.hasOwnProperty.call(cache2, base)) { + return gotResolvedLink(cache2[base]); + } + return fs2.lstat(base, gotStat); } - var exists2; - var stat2 = this.statCache[abs]; - if (!stat2) { - var lstat; - try { - lstat = this.fs.lstatSync(abs); - } catch (er) { - if (er && (er.code === "ENOENT" || er.code === "ENOTDIR")) { - this.statCache[abs] = false; - return false; - } + function 
gotStat(err, stat2) { + if (err) return cb(err); + if (!stat2.isSymbolicLink()) { + knownHard[base] = true; + if (cache2) cache2[base] = base; + return process.nextTick(LOOP); } - if (lstat && lstat.isSymbolicLink()) { - try { - stat2 = this.fs.statSync(abs); - } catch (er) { - stat2 = lstat; + if (!isWindows) { + var id = stat2.dev.toString(32) + ":" + stat2.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); } - } else { - stat2 = lstat; } + fs2.stat(base, function(err2) { + if (err2) return cb(err2); + fs2.readlink(base, function(err3, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err3, target); + }); + }); + } + function gotTarget(err, target, base2) { + if (err) return cb(err); + var resolvedLink = pathModule.resolve(previous, target); + if (cache2) cache2[base2] = resolvedLink; + gotResolvedLink(resolvedLink); + } + function gotResolvedLink(resolvedLink) { + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); } - this.statCache[abs] = stat2; - var c = true; - if (stat2) - c = stat2.isDirectory() ? "DIR" : "FILE"; - this.cache[abs] = this.cache[abs] || c; - if (needDir && c === "FILE") - return false; - return c; - }; - GlobSync.prototype._mark = function(p) { - return common.mark(this, p); - }; - GlobSync.prototype._makeAbs = function(f3) { - return common.makeAbs(this, f3); }; } }); -// node_modules/inflight/inflight.js -var require_inflight = __commonJS({ - "node_modules/inflight/inflight.js"(exports2, module2) { +// node_modules/fs.realpath/index.js +var require_fs = __commonJS({ + "node_modules/fs.realpath/index.js"(exports2, module2) { "use strict"; - var wrappy = require_wrappy(); - var reqs = /* @__PURE__ */ Object.create(null); - var once2 = require_once(); - module2.exports = wrappy(inflight); - function inflight(key, cb) { - if (reqs[key]) { - reqs[key].push(cb); - return null; - } else { - reqs[key] = [cb]; - return makeres(key); - } + module2.exports = realpath; + realpath.realpath = realpath; + realpath.sync = realpathSync; + realpath.realpathSync = realpathSync; + realpath.monkeypatch = monkeypatch; + realpath.unmonkeypatch = unmonkeypatch; + var fs2 = require("fs"); + var origRealpath = fs2.realpath; + var origRealpathSync = fs2.realpathSync; + var version = process.version; + var ok = /^v[0-5]\./.test(version); + var old = require_old(); + function newError(er) { + return er && er.syscall === "realpath" && (er.code === "ELOOP" || er.code === "ENOMEM" || er.code === "ENAMETOOLONG"); } - function makeres(key) { - return once2(function RES() { - var cbs = reqs[key]; - var len = cbs.length; - var args = slice(arguments); - try { - for (var i2 = 0; i2 < len; i2++) { - cbs[i2].apply(null, args); - } - } finally { - if (cbs.length > len) { - cbs.splice(0, len); - process.nextTick(function() { - RES.apply(null, args); - }); - } else { - delete reqs[key]; - } + function realpath(p, cache2, cb) { + if (ok) { + return origRealpath(p, cache2, cb); + } + if (typeof cache2 === "function") { + cb = cache2; + cache2 = null; + } + origRealpath(p, cache2, function(er, result) { + if (newError(er)) { + old.realpath(p, cache2, cb); + } else { + cb(er, result); } }); } - function slice(args) { - var length = args.length; - var array = []; - for (var i2 = 0; i2 < length; i2++) - array[i2] = args[i2]; - return array; - } - } -}); - -// node_modules/glob/glob.js -var require_glob = __commonJS({ - "node_modules/glob/glob.js"(exports2, module2) { - "use strict"; - module2.exports = glob; - var rp = require_fs(); - 
var minimatch2 = require_minimatch(); - var Minimatch2 = minimatch2.Minimatch; - var inherits = require_inherits(); - var EE = require("events").EventEmitter; - var path2 = require("path"); - var assert = require("assert"); - var isAbsolute = require_path_is_absolute(); - var globSync = require_sync(); - var common = require_common2(); - var setopts = common.setopts; - var ownProp = common.ownProp; - var inflight = require_inflight(); - var util = require("util"); - var childrenIgnored = common.childrenIgnored; - var isIgnored = common.isIgnored; - var once2 = require_once(); - function glob(pattern, options, cb) { - if (typeof options === "function") - cb = options, options = {}; - if (!options) - options = {}; - if (options.sync) { - if (cb) - throw new TypeError("callback provided to sync glob"); - return globSync(pattern, options); - } - return new Glob(pattern, options, cb); - } - glob.sync = globSync; - var GlobSync = glob.GlobSync = globSync.GlobSync; - glob.glob = glob; - function extend(origin, add) { - if (add === null || typeof add !== "object") { - return origin; + function realpathSync(p, cache2) { + if (ok) { + return origRealpathSync(p, cache2); } - var keys = Object.keys(add); - var i2 = keys.length; - while (i2--) { - origin[keys[i2]] = add[keys[i2]]; + try { + return origRealpathSync(p, cache2); + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache2); + } else { + throw er; + } } - return origin; } - glob.hasMagic = function(pattern, options_) { - var options = extend({}, options_); - options.noprocess = true; - var g = new Glob(pattern, options); - var set2 = g.minimatch.set; - if (!pattern) - return false; - if (set2.length > 1) - return true; - for (var j = 0; j < set2[0].length; j++) { - if (typeof set2[0][j] !== "string") - return true; + function monkeypatch() { + fs2.realpath = realpath; + fs2.realpathSync = realpathSync; + } + function unmonkeypatch() { + fs2.realpath = origRealpath; + fs2.realpathSync = origRealpathSync; + } + } +}); + +// node_modules/concat-map/index.js +var require_concat_map = __commonJS({ + "node_modules/concat-map/index.js"(exports2, module2) { + "use strict"; + module2.exports = function(xs, fn) { + var res = []; + for (var i2 = 0; i2 < xs.length; i2++) { + var x2 = fn(xs[i2], i2); + if (isArray(x2)) res.push.apply(res, x2); + else res.push(x2); } - return false; + return res; }; - glob.Glob = Glob; - inherits(Glob, EE); - function Glob(pattern, options, cb) { - if (typeof options === "function") { - cb = options; - options = null; - } - if (options && options.sync) { - if (cb) - throw new TypeError("callback provided to sync glob"); - return new GlobSync(pattern, options); - } - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb); - setopts(this, pattern, options); - this._didRealPath = false; - var n = this.minimatch.set.length; - this.matches = new Array(n); - if (typeof cb === "function") { - cb = once2(cb); - this.on("error", cb); - this.on("end", function(matches) { - cb(null, matches); - }); - } - var self2 = this; - this._processing = 0; - this._emitQueue = []; - this._processQueue = []; - this.paused = false; - if (this.noprocess) - return this; - if (n === 0) - return done(); - var sync = true; - for (var i2 = 0; i2 < n; i2++) { - this._process(this.minimatch.set[i2], i2, false, done); - } - sync = false; - function done() { - --self2._processing; - if (self2._processing <= 0) { - if (sync) { - process.nextTick(function() { - self2._finish(); - }); + var isArray = Array.isArray || function(xs) 
{ + return Object.prototype.toString.call(xs) === "[object Array]"; + }; + } +}); + +// node_modules/balanced-match/index.js +var require_balanced_match = __commonJS({ + "node_modules/balanced-match/index.js"(exports2, module2) { + "use strict"; + module2.exports = balanced; + function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + var r2 = range(a, b, str); + return r2 && { + start: r2[0], + end: r2[1], + pre: str.slice(0, r2[0]), + body: str.slice(r2[0] + a.length, r2[1]), + post: str.slice(r2[1] + b.length) + }; + } + function maybeMatch(reg, str) { + var m2 = str.match(reg); + return m2 ? m2[0] : null; + } + balanced.range = range; + function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i2 = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i2 >= 0 && !result) { + if (i2 == ai) { + begs.push(i2); + ai = str.indexOf(a, i2 + 1); + } else if (begs.length == 1) { + result = [begs.pop(), bi]; } else { - self2._finish(); + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i2 + 1); } + i2 = ai < bi && ai >= 0 ? ai : bi; + } + if (begs.length) { + result = [left, right]; } } + return result; } - Glob.prototype._finish = function() { - assert(this instanceof Glob); - if (this.aborted) - return; - if (this.realpath && !this._didRealpath) - return this._realpath(); - common.finish(this); - this.emit("end", this.found); - }; - Glob.prototype._realpath = function() { - if (this._didRealpath) - return; - this._didRealpath = true; - var n = this.matches.length; - if (n === 0) - return this._finish(); - var self2 = this; - for (var i2 = 0; i2 < this.matches.length; i2++) - this._realpathSet(i2, next); - function next() { - if (--n === 0) - self2._finish(); + } +}); + +// node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js +var require_brace_expansion = __commonJS({ + "node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js"(exports2, module2) { + "use strict"; + var concatMap = require_concat_map(); + var balanced = require_balanced_match(); + module2.exports = expandTop; + var escSlash = "\0SLASH" + Math.random() + "\0"; + var escOpen = "\0OPEN" + Math.random() + "\0"; + var escClose = "\0CLOSE" + Math.random() + "\0"; + var escComma = "\0COMMA" + Math.random() + "\0"; + var escPeriod = "\0PERIOD" + Math.random() + "\0"; + function numeric(str) { + return parseInt(str, 10) == str ? 
parseInt(str, 10) : str.charCodeAt(0); + } + function escapeBraces(str) { + return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); + } + function unescapeBraces(str) { + return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); + } + function parseCommaParts(str) { + if (!str) + return [""]; + var parts = []; + var m2 = balanced("{", "}", str); + if (!m2) + return str.split(","); + var pre = m2.pre; + var body = m2.body; + var post = m2.post; + var p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); } - }; - Glob.prototype._realpathSet = function(index, cb) { - var matchset = this.matches[index]; - if (!matchset) - return cb(); - var found = Object.keys(matchset); - var self2 = this; - var n = found.length; - if (n === 0) - return cb(); - var set2 = this.matches[index] = /* @__PURE__ */ Object.create(null); - found.forEach(function(p, i2) { - p = self2._makeAbs(p); - rp.realpath(p, self2.realpathCache, function(er, real) { - if (!er) - set2[real] = true; - else if (er.syscall === "stat") - set2[p] = true; - else - self2.emit("error", er); - if (--n === 0) { - self2.matches[index] = set2; - cb(); - } - }); - }); - }; - Glob.prototype._mark = function(p) { - return common.mark(this, p); - }; - Glob.prototype._makeAbs = function(f3) { - return common.makeAbs(this, f3); - }; - Glob.prototype.abort = function() { - this.aborted = true; - this.emit("abort"); - }; - Glob.prototype.pause = function() { - if (!this.paused) { - this.paused = true; - this.emit("pause"); + parts.push.apply(parts, p); + return parts; + } + function expandTop(str) { + if (!str) + return []; + if (str.substr(0, 2) === "{}") { + str = "\\{\\}" + str.substr(2); } - }; - Glob.prototype.resume = function() { - if (this.paused) { - this.emit("resume"); - this.paused = false; - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0); - this._emitQueue.length = 0; - for (var i2 = 0; i2 < eq.length; i2++) { - var e2 = eq[i2]; - this._emitMatch(e2[0], e2[1]); - } + return expand4(escapeBraces(str), true).map(unescapeBraces); + } + function embrace(str) { + return "{" + str + "}"; + } + function isPadded(el) { + return /^-?0\d/.test(el); + } + function lte(i2, y) { + return i2 <= y; + } + function gte(i2, y) { + return i2 >= y; + } + function expand4(str, isTop) { + var expansions = []; + var m2 = balanced("{", "}", str); + if (!m2 || /\$$/.test(m2.pre)) return [str]; + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m2.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m2.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m2.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m2.post.match(/,.*\}/)) { + str = m2.pre + "{" + m2.body + escClose + m2.post; + return expand4(str); } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0); - this._processQueue.length = 0; - for (var i2 = 0; i2 < pq.length; i2++) { - var p = pq[i2]; - this._processing--; - this._process(p[0], p[1], p[2], p[3]); + return [str]; + } + var n; + if (isSequence) { + n = m2.body.split(/\.\./); + } else { + n = parseCommaParts(m2.body); + if (n.length === 1) { + n = expand4(n[0], false).map(embrace); + if (n.length === 1) { + var post = 
m2.post.length ? expand4(m2.post, false) : [""]; + return post.map(function(p) { + return m2.pre + n[0] + p; + }); } } } - }; - Glob.prototype._process = function(pattern, index, inGlobStar, cb) { - assert(this instanceof Glob); - assert(typeof cb === "function"); - if (this.aborted) - return; - this._processing++; - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]); - return; - } - var n = 0; - while (typeof pattern[n] === "string") { - n++; - } - var prefix; - switch (n) { - case pattern.length: - this._processSimple(pattern.join("/"), index, cb); - return; - case 0: - prefix = null; - break; - default: - prefix = pattern.slice(0, n).join("/"); - break; - } - var remain = pattern.slice(n); - var read; - if (prefix === null) - read = "."; - else if (isAbsolute(prefix) || isAbsolute(pattern.map(function(p) { - return typeof p === "string" ? p : "[*]"; - }).join("/"))) { - if (!prefix || !isAbsolute(prefix)) - prefix = "/" + prefix; - read = prefix; - } else - read = prefix; - var abs = this._makeAbs(read); - if (childrenIgnored(this, read)) - return cb(); - var isGlobStar = remain[0] === minimatch2.GLOBSTAR; - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb); - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb); - }; - Glob.prototype._processReaddir = function(prefix, read, abs, remain, index, inGlobStar, cb) { - var self2 = this; - this._readdir(abs, inGlobStar, function(er, entries) { - return self2._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb); - }); - }; - Glob.prototype._processReaddir2 = function(prefix, read, abs, remain, index, inGlobStar, entries, cb) { - if (!entries) - return cb(); - var pn = remain[0]; - var negate = !!this.minimatch.negate; - var rawGlob = pn._glob; - var dotOk = this.dot || rawGlob.charAt(0) === "."; - var matchedEntries = []; - for (var i2 = 0; i2 < entries.length; i2++) { - var e2 = entries[i2]; - if (e2.charAt(0) !== "." || dotOk) { - var m2; - if (negate && !prefix) { - m2 = !e2.match(pn); + var pre = m2.pre; + var post = m2.post.length ? expand4(m2.post, false) : [""]; + var N; + if (isSequence) { + var x2 = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length); + var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; + var test = lte; + var reverse = y < x2; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + N = []; + for (var i2 = x2; test(i2, y); i2 += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i2); + if (c === "\\") + c = ""; } else { - m2 = e2.match(pn); + c = String(i2); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join("0"); + if (i2 < 0) + c = "-" + z + c.slice(1); + else + c = z + c; + } + } } - if (m2) - matchedEntries.push(e2); + N.push(c); } + } else { + N = concatMap(n, function(el) { + return expand4(el, false); + }); } - var len = matchedEntries.length; - if (len === 0) - return cb(); - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = /* @__PURE__ */ Object.create(null); - for (var i2 = 0; i2 < len; i2++) { - var e2 = matchedEntries[i2]; - if (prefix) { - if (prefix !== "/") - e2 = prefix + "/" + e2; - else - e2 = prefix + e2; - } - if (e2.charAt(0) === "/" && !this.nomount) { - e2 = path2.join(this.root, e2); - } - this._emitMatch(index, e2); + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); } - return cb(); } - remain.shift(); - for (var i2 = 0; i2 < len; i2++) { - var e2 = matchedEntries[i2]; - var newPattern; - if (prefix) { - if (prefix !== "/") - e2 = prefix + "/" + e2; - else - e2 = prefix + e2; - } - this._process([e2].concat(remain), index, inGlobStar, cb); + return expansions; + } + } +}); + +// node_modules/glob/node_modules/minimatch/minimatch.js +var require_minimatch = __commonJS({ + "node_modules/glob/node_modules/minimatch/minimatch.js"(exports2, module2) { + "use strict"; + module2.exports = minimatch2; + minimatch2.Minimatch = Minimatch2; + var path2 = function() { + try { + return require("path"); + } catch (e2) { } - cb(); + }() || { + sep: "/" }; - Glob.prototype._emitMatch = function(index, e2) { - if (this.aborted) - return; - if (isIgnored(this, e2)) - return; - if (this.paused) { - this._emitQueue.push([index, e2]); - return; - } - var abs = isAbsolute(e2) ? e2 : this._makeAbs(e2); - if (this.mark) - e2 = this._mark(e2); - if (this.absolute) - e2 = abs; - if (this.matches[index][e2]) - return; - if (this.nodir) { - var c = this.cache[abs]; - if (c === "DIR" || Array.isArray(c)) - return; - } - this.matches[index][e2] = true; - var st = this.statCache[abs]; - if (st) - this.emit("stat", e2, st); - this.emit("match", e2); + minimatch2.sep = path2.sep; + var GLOBSTAR2 = minimatch2.GLOBSTAR = Minimatch2.GLOBSTAR = {}; + var expand4 = require_brace_expansion(); + var plTypes = { + "!": { open: "(?:(?!(?:", close: "))[^/]*?)" }, + "?": { open: "(?:", close: ")?" 
}, + "+": { open: "(?:", close: ")+" }, + "*": { open: "(?:", close: ")*" }, + "@": { open: "(?:", close: ")" } }; - Glob.prototype._readdirInGlobStar = function(abs, cb) { - if (this.aborted) - return; - if (this.follow) - return this._readdir(abs, false, cb); - var lstatkey = "lstat\0" + abs; - var self2 = this; - var lstatcb = inflight(lstatkey, lstatcb_); - if (lstatcb) - self2.fs.lstat(abs, lstatcb); - function lstatcb_(er, lstat) { - if (er && er.code === "ENOENT") - return cb(); - var isSym = lstat && lstat.isSymbolicLink(); - self2.symlinks[abs] = isSym; - if (!isSym && lstat && !lstat.isDirectory()) { - self2.cache[abs] = "FILE"; - cb(); - } else - self2._readdir(abs, false, cb); + var qmark3 = "[^/]"; + var star3 = qmark3 + "*?"; + var twoStarDot2 = "(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?"; + var twoStarNoDot2 = "(?:(?!(?:\\/|^)\\.).)*?"; + var reSpecials2 = charSet("().*{}+?[]^$\\!"); + function charSet(s2) { + return s2.split("").reduce(function(set2, c) { + set2[c] = true; + return set2; + }, {}); + } + var slashSplit = /\/+/; + minimatch2.filter = filter2; + function filter2(pattern, options) { + options = options || {}; + return function(p, i2, list) { + return minimatch2(p, pattern, options); + }; + } + function ext2(a, b) { + b = b || {}; + var t2 = {}; + Object.keys(a).forEach(function(k) { + t2[k] = a[k]; + }); + Object.keys(b).forEach(function(k) { + t2[k] = b[k]; + }); + return t2; + } + minimatch2.defaults = function(def) { + if (!def || typeof def !== "object" || !Object.keys(def).length) { + return minimatch2; } + var orig = minimatch2; + var m2 = function minimatch3(p, pattern, options) { + return orig(p, pattern, ext2(def, options)); + }; + m2.Minimatch = function Minimatch3(pattern, options) { + return new orig.Minimatch(pattern, ext2(def, options)); + }; + m2.Minimatch.defaults = function defaults2(options) { + return orig.defaults(ext2(def, options)).Minimatch; + }; + m2.filter = function filter3(pattern, options) { + return orig.filter(pattern, ext2(def, options)); + }; + m2.defaults = function defaults2(options) { + return orig.defaults(ext2(def, options)); + }; + m2.makeRe = function makeRe3(pattern, options) { + return orig.makeRe(pattern, ext2(def, options)); + }; + m2.braceExpand = function braceExpand3(pattern, options) { + return orig.braceExpand(pattern, ext2(def, options)); + }; + m2.match = function(list, pattern, options) { + return orig.match(list, pattern, ext2(def, options)); + }; + return m2; }; - Glob.prototype._readdir = function(abs, inGlobStar, cb) { - if (this.aborted) - return; - cb = inflight("readdir\0" + abs + "\0" + inGlobStar, cb); - if (!cb) - return; - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb); - if (ownProp(this.cache, abs)) { - var c = this.cache[abs]; - if (!c || c === "FILE") - return cb(); - if (Array.isArray(c)) - return cb(null, c); - } - var self2 = this; - self2.fs.readdir(abs, readdirCb(this, abs, cb)); + Minimatch2.defaults = function(def) { + return minimatch2.defaults(def).Minimatch; }; - function readdirCb(self2, abs, cb) { - return function(er, entries) { - if (er) - self2._readdirError(abs, er, cb); - else - self2._readdirEntries(abs, entries, cb); - }; + function minimatch2(p, pattern, options) { + assertValidPattern2(pattern); + if (!options) options = {}; + if (!options.nocomment && pattern.charAt(0) === "#") { + return false; + } + return new Minimatch2(pattern, options).match(p); } - Glob.prototype._readdirEntries = function(abs, entries, cb) { - if (this.aborted) 
- return; - if (!this.mark && !this.stat) { - for (var i2 = 0; i2 < entries.length; i2++) { - var e2 = entries[i2]; - if (abs === "/") - e2 = abs + e2; - else - e2 = abs + "/" + e2; - this.cache[e2] = true; - } + function Minimatch2(pattern, options) { + if (!(this instanceof Minimatch2)) { + return new Minimatch2(pattern, options); } - this.cache[abs] = entries; - return cb(null, entries); + assertValidPattern2(pattern); + if (!options) options = {}; + pattern = pattern.trim(); + if (!options.allowWindowsEscape && path2.sep !== "/") { + pattern = pattern.split(path2.sep).join("/"); + } + this.options = options; + this.set = []; + this.pattern = pattern; + this.regexp = null; + this.negate = false; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.make(); + } + Minimatch2.prototype.debug = function() { }; - Glob.prototype._readdirError = function(f3, er, cb) { - if (this.aborted) + Minimatch2.prototype.make = make; + function make() { + var pattern = this.pattern; + var options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; return; - switch (er.code) { - case "ENOTSUP": - case "ENOTDIR": - var abs = this._makeAbs(f3); - this.cache[abs] = "FILE"; - if (abs === this.cwdAbs) { - var error = new Error(er.code + " invalid cwd " + this.cwd); - error.path = this.cwd; - error.code = er.code; - this.emit("error", error); - this.abort(); - } - break; - case "ENOENT": - case "ELOOP": - case "ENAMETOOLONG": - case "UNKNOWN": - this.cache[this._makeAbs(f3)] = false; - break; - default: - this.cache[this._makeAbs(f3)] = false; - if (this.strict) { - this.emit("error", er); - this.abort(); - } - if (!this.silent) - console.error("glob error", er); - break; } - return cb(); - }; - Glob.prototype._processGlobStar = function(prefix, read, abs, remain, index, inGlobStar, cb) { - var self2 = this; - this._readdir(abs, inGlobStar, function(er, entries) { - self2._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb); - }); - }; - Glob.prototype._processGlobStar2 = function(prefix, read, abs, remain, index, inGlobStar, entries, cb) { - if (!entries) - return cb(); - var remainWithoutGlobStar = remain.slice(1); - var gspref = prefix ? [prefix] : []; - var noGlobStar = gspref.concat(remainWithoutGlobStar); - this._process(noGlobStar, index, false, cb); - var isSym = this.symlinks[abs]; - var len = entries.length; - if (isSym && inGlobStar) - return cb(); - for (var i2 = 0; i2 < len; i2++) { - var e2 = entries[i2]; - if (e2.charAt(0) === "." 
&& !this.dot) - continue; - var instead = gspref.concat(entries[i2], remainWithoutGlobStar); - this._process(instead, index, true, cb); - var below = gspref.concat(entries[i2], remain); - this._process(below, index, true, cb); + if (!pattern) { + this.empty = true; + return; } - cb(); - }; - Glob.prototype._processSimple = function(prefix, index, cb) { - var self2 = this; - this._stat(prefix, function(er, exists2) { - self2._processSimple2(prefix, index, er, exists2, cb); + this.parseNegate(); + var set2 = this.globSet = this.braceExpand(); + if (options.debug) this.debug = function debug5() { + console.error.apply(console, arguments); + }; + this.debug(this.pattern, set2); + set2 = this.globParts = set2.map(function(s2) { + return s2.split(slashSplit); }); - }; - Glob.prototype._processSimple2 = function(prefix, index, er, exists2, cb) { - if (!this.matches[index]) - this.matches[index] = /* @__PURE__ */ Object.create(null); - if (!exists2) - return cb(); - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix); - if (prefix.charAt(0) === "/") { - prefix = path2.join(this.root, prefix); - } else { - prefix = path2.resolve(this.root, prefix); - if (trail) - prefix += "/"; - } + this.debug(this.pattern, set2); + set2 = set2.map(function(s2, si, set3) { + return s2.map(this.parse, this); + }, this); + this.debug(this.pattern, set2); + set2 = set2.filter(function(s2) { + return s2.indexOf(false) === -1; + }); + this.debug(this.pattern, set2); + this.set = set2; + } + Minimatch2.prototype.parseNegate = parseNegate; + function parseNegate() { + var pattern = this.pattern; + var negate = false; + var options = this.options; + var negateOffset = 0; + if (options.nonegate) return; + for (var i2 = 0, l = pattern.length; i2 < l && pattern.charAt(i2) === "!"; i2++) { + negate = !negate; + negateOffset++; } - if (process.platform === "win32") - prefix = prefix.replace(/\\/g, "/"); - this._emitMatch(index, prefix); - cb(); + if (negateOffset) this.pattern = pattern.substr(negateOffset); + this.negate = negate; + } + minimatch2.braceExpand = function(pattern, options) { + return braceExpand2(pattern, options); }; - Glob.prototype._stat = function(f3, cb) { - var abs = this._makeAbs(f3); - var needDir = f3.slice(-1) === "/"; - if (f3.length > this.maxLength) - return cb(); - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs]; - if (Array.isArray(c)) - c = "DIR"; - if (!needDir || c === "DIR") - return cb(null, c); - if (needDir && c === "FILE") - return cb(); - } - var exists2; - var stat2 = this.statCache[abs]; - if (stat2 !== void 0) { - if (stat2 === false) - return cb(null, stat2); - else { - var type = stat2.isDirectory() ? 
"DIR" : "FILE"; - if (needDir && type === "FILE") - return cb(); - else - return cb(null, type, stat2); - } - } - var self2 = this; - var statcb = inflight("stat\0" + abs, lstatcb_); - if (statcb) - self2.fs.lstat(abs, statcb); - function lstatcb_(er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - return self2.fs.stat(abs, function(er2, stat3) { - if (er2) - self2._stat2(f3, abs, null, lstat, cb); - else - self2._stat2(f3, abs, er2, stat3, cb); - }); + Minimatch2.prototype.braceExpand = braceExpand2; + function braceExpand2(pattern, options) { + if (!options) { + if (this instanceof Minimatch2) { + options = this.options; } else { - self2._stat2(f3, abs, er, lstat, cb); + options = {}; } } - }; - Glob.prototype._stat2 = function(f3, abs, er, stat2, cb) { - if (er && (er.code === "ENOENT" || er.code === "ENOTDIR")) { - this.statCache[abs] = false; - return cb(); + pattern = typeof pattern === "undefined" ? this.pattern : pattern; + assertValidPattern2(pattern); + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + return [pattern]; } - var needDir = f3.slice(-1) === "/"; - this.statCache[abs] = stat2; - if (abs.slice(-1) === "/" && stat2 && !stat2.isDirectory()) - return cb(null, false, stat2); - var c = true; - if (stat2) - c = stat2.isDirectory() ? "DIR" : "FILE"; - this.cache[abs] = this.cache[abs] || c; - if (needDir && c === "FILE") - return cb(); - return cb(null, c, stat2); - }; - } -}); - -// node_modules/true-case-path/index.js -var require_true_case_path = __commonJS({ - "node_modules/true-case-path/index.js"(exports2, module2) { - "use strict"; - var glob = require_glob(); - var path2 = require("path"); - function trueCasePathSync(fsPath) { - var fsPathNormalized = path2.normalize(fsPath); - if (process.platform === "darwin") - fsPathNormalized = fsPathNormalized.normalize("NFD"); - var pathRoot = path2.parse(fsPathNormalized).root; - var noDrivePath = fsPathNormalized.slice(Math.max(pathRoot.length - 1, 0)); - return glob.sync(noDrivePath, { nocase: true, cwd: pathRoot })[0]; - } - module2.exports = trueCasePathSync; - } -}); - -// node_modules/codeowners/codeowners.js -var require_codeowners = __commonJS({ - "node_modules/codeowners/codeowners.js"(exports2, module2) { - "use strict"; - var findUp = require_find_up(); - var fs2 = require("fs"); - var ignore = require_ignore(); - var isDirectory = require_is_directory(); - var path2 = require("path"); - var trueCasePath = require_true_case_path(); - function ownerMatcher(pathString) { - const matcher = ignore().add(pathString); - return matcher.ignores.bind(matcher); + return expand4(pattern); } - function Codeowners2(currentPath, fileName = "CODEOWNERS") { - const pathOrCwd = currentPath || process.cwd(); - const codeownersPath = findUp.sync( - [`.github/${fileName}`, `.gitlab/${fileName}`, `docs/${fileName}`, `${fileName}`], - { cwd: pathOrCwd } - ); - if (!codeownersPath) { - throw new Error(`Could not find a CODEOWNERS file`); - } - this.codeownersFilePath = trueCasePath(codeownersPath); - this.codeownersDirectory = path2.dirname(this.codeownersFilePath); - if (this.codeownersDirectory.match(/\/(.github|.gitlab|docs)$/i)) { - this.codeownersDirectory = path2.dirname(this.codeownersDirectory); + var MAX_PATTERN_LENGTH2 = 1024 * 64; + var assertValidPattern2 = function(pattern) { + if (typeof pattern !== "string") { + throw new TypeError("invalid pattern"); } - const codeownersFile = path2.basename(this.codeownersFilePath); - if (codeownersFile !== fileName) { - throw new Error(`Found a ${fileName} file but it 
was lower-cased: ${this.codeownersFilePath}`); + if (pattern.length > MAX_PATTERN_LENGTH2) { + throw new TypeError("pattern is too long"); } - if (isDirectory.sync(this.codeownersFilePath)) { - throw new Error(`Found a ${fileName} but it's a directory: ${this.codeownersFilePath}`); + }; + Minimatch2.prototype.parse = parse3; + var SUBPARSE = {}; + function parse3(pattern, isSub) { + assertValidPattern2(pattern); + var options = this.options; + if (pattern === "**") { + if (!options.noglobstar) + return GLOBSTAR2; + else + pattern = "*"; } - const lines = fs2.readFileSync(this.codeownersFilePath).toString().split(/\r\n|\r|\n/); - const ownerEntries = []; - for (const line of lines) { - if (!line) { - continue; - } - if (line.startsWith("#")) { - continue; + if (pattern === "") return ""; + var re = ""; + var hasMagic = !!options.nocase; + var escaping = false; + var patternListStack = []; + var negativeLists = []; + var stateChar; + var inClass = false; + var reClassStart = -1; + var classStart = -1; + var patternStart = pattern.charAt(0) === "." ? "" : options.dot ? "(?!(?:^|\\/)\\.{1,2}(?:$|\\/))" : "(?!\\.)"; + var self2 = this; + function clearStateChar() { + if (stateChar) { + switch (stateChar) { + case "*": + re += star3; + hasMagic = true; + break; + case "?": + re += qmark3; + hasMagic = true; + break; + default: + re += "\\" + stateChar; + break; + } + self2.debug("clearStateChar %j %j", stateChar, re); + stateChar = false; } - const [pathString, ...usernames] = line.split(/\s+/); - ownerEntries.push({ - path: pathString, - usernames, - match: ownerMatcher(pathString) - }); } - this.ownerEntries = ownerEntries.reverse(); - } - var EMPTY_ARRAY = []; - Codeowners2.prototype.getOwner = function getOwner(filePath) { - for (const entry of this.ownerEntries) { - if (entry.match(filePath)) { - return entry.usernames; + for (var i2 = 0, len = pattern.length, c; i2 < len && (c = pattern.charAt(i2)); i2++) { + this.debug("%s %s %s %j", pattern, i2, re, c); + if (escaping && reSpecials2[c]) { + re += "\\" + c; + escaping = false; + continue; } - } - return EMPTY_ARRAY; - }; - module2.exports = Codeowners2; - } -}); - -// node_modules/brace-expansion/index.js -var require_brace_expansion2 = __commonJS({ - "node_modules/brace-expansion/index.js"(exports2, module2) { - "use strict"; - var balanced = require_balanced_match(); - module2.exports = expandTop; - var escSlash = "\0SLASH" + Math.random() + "\0"; - var escOpen = "\0OPEN" + Math.random() + "\0"; - var escClose = "\0CLOSE" + Math.random() + "\0"; - var escComma = "\0COMMA" + Math.random() + "\0"; - var escPeriod = "\0PERIOD" + Math.random() + "\0"; - function numeric(str) { - return parseInt(str, 10) == str ? 
parseInt(str, 10) : str.charCodeAt(0); - } - function escapeBraces(str) { - return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); - } - function unescapeBraces(str) { - return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); - } - function parseCommaParts(str) { - if (!str) - return [""]; - var parts = []; - var m2 = balanced("{", "}", str); - if (!m2) - return str.split(","); - var pre = m2.pre; - var body = m2.body; - var post = m2.post; - var p = pre.split(","); - p[p.length - 1] += "{" + body + "}"; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length - 1] += postParts.shift(); - p.push.apply(p, postParts); - } - parts.push.apply(parts, p); - return parts; - } - function expandTop(str) { - if (!str) - return []; - if (str.substr(0, 2) === "{}") { - str = "\\{\\}" + str.substr(2); - } - return expand3(escapeBraces(str), true).map(unescapeBraces); - } - function embrace(str) { - return "{" + str + "}"; - } - function isPadded(el) { - return /^-?0\d/.test(el); - } - function lte(i2, y) { - return i2 <= y; - } - function gte(i2, y) { - return i2 >= y; - } - function expand3(str, isTop) { - var expansions = []; - var m2 = balanced("{", "}", str); - if (!m2) - return [str]; - var pre = m2.pre; - var post = m2.post.length ? expand3(m2.post, false) : [""]; - if (/\$$/.test(m2.pre)) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + "{" + m2.body + "}" + post[k]; - expansions.push(expansion); - } - } else { - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m2.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m2.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m2.body.indexOf(",") >= 0; - if (!isSequence && !isOptions) { - if (m2.post.match(/,.*\}/)) { - str = m2.pre + "{" + m2.body + escClose + m2.post; - return expand3(str); + switch (c) { + /* istanbul ignore next */ + case "/": { + return false; } - return [str]; - } - var n; - if (isSequence) { - n = m2.body.split(/\.\./); - } else { - n = parseCommaParts(m2.body); - if (n.length === 1) { - n = expand3(n[0], false).map(embrace); - if (n.length === 1) { - return post.map(function(p) { - return m2.pre + n[0] + p; - }); + case "\\": + clearStateChar(); + escaping = true; + continue; + // the various stateChar values + // for the "extglob" stuff. + case "?": + case "*": + case "+": + case "@": + case "!": + this.debug("%s %s %s %j <-- stateChar", pattern, i2, re, c); + if (inClass) { + this.debug(" in class"); + if (c === "!" && i2 === classStart + 1) c = "^"; + re += c; + continue; } - } - } - var N; - if (isSequence) { - var x2 = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length); - var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; - var test = lte; - var reverse = y < x2; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - N = []; - for (var i2 = x2; test(i2, y); i2 += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i2); - if (c === "\\") - c = ""; - } else { - c = String(i2); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join("0"); - if (i2 < 0) - c = "-" + z + c.slice(1); - else - c = z + c; - } - } + self2.debug("call clearStateChar %j", stateChar); + clearStateChar(); + stateChar = c; + if (options.noext) clearStateChar(); + continue; + case "(": + if (inClass) { + re += "("; + continue; } - N.push(c); - } - } else { - N = []; - for (var j = 0; j < n.length; j++) { - N.push.apply(N, expand3(n[j], false)); - } - } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } + if (!stateChar) { + re += "\\("; + continue; + } + patternListStack.push({ + type: stateChar, + start: i2 - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }); + re += stateChar === "!" ? "(?:(?!(?:" : "(?:"; + this.debug("plType %j %j", stateChar, re); + stateChar = false; + continue; + case ")": + if (inClass || !patternListStack.length) { + re += "\\)"; + continue; + } + clearStateChar(); + hasMagic = true; + var pl = patternListStack.pop(); + re += pl.close; + if (pl.type === "!") { + negativeLists.push(pl); + } + pl.reEnd = re.length; + continue; + case "|": + if (inClass || !patternListStack.length || escaping) { + re += "\\|"; + escaping = false; + continue; + } + clearStateChar(); + re += "|"; + continue; + // these are mostly the same in regexp and glob + case "[": + clearStateChar(); + if (inClass) { + re += "\\" + c; + continue; + } + inClass = true; + classStart = i2; + reClassStart = re.length; + re += c; + continue; + case "]": + if (i2 === classStart + 1 || !inClass) { + re += "\\" + c; + escaping = false; + continue; + } + var cs = pattern.substring(classStart + 1, i2); + try { + RegExp("[" + cs + "]"); + } catch (er) { + var sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0] + "\\]"; + hasMagic = hasMagic || sp[1]; + inClass = false; + continue; + } + hasMagic = true; + inClass = false; + re += c; + continue; + default: + clearStateChar(); + if (escaping) { + escaping = false; + } else if (reSpecials2[c] && !(c === "^" && inClass)) { + re += "\\"; + } + re += c; } } - return expansions; - } - } -}); - -// node_modules/@octokit/auth-action/dist-node/index.js -var require_dist_node = __commonJS({ - "node_modules/@octokit/auth-action/dist-node/index.js"(exports2, module2) { - "use strict"; - var __defProp3 = Object.defineProperty; - var __getOwnPropDesc3 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames3 = Object.getOwnPropertyNames; - var __hasOwnProp3 = Object.prototype.hasOwnProperty; - var __export3 = (target, all) => { - for (var name in all) - __defProp3(target, name, { get: all[name], enumerable: true }); - }; - var __copyProps3 = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames3(from)) - if (!__hasOwnProp3.call(to, key) && key !== except) - __defProp3(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc3(from, key)) || desc.enumerable }); - } - return to; - }; - var 
__toCommonJS3 = (mod) => __copyProps3(__defProp3({}, "__esModule", { value: true }), mod); - var dist_src_exports3 = {}; - __export3(dist_src_exports3, { - createActionAuth: () => createActionAuth2 - }); - module2.exports = __toCommonJS3(dist_src_exports3); - var import_auth_token2 = (init_dist_src4(), __toCommonJS(dist_src_exports)); - var createActionAuth2 = function createActionAuth22() { - if (!process.env.GITHUB_ACTION) { - throw new Error( - "[@octokit/auth-action] `GITHUB_ACTION` environment variable is not set. @octokit/auth-action is meant to be used in GitHub Actions only." - ); - } - const definitions = [ - process.env.GITHUB_TOKEN, - process.env.INPUT_GITHUB_TOKEN, - process.env.INPUT_TOKEN - ].filter(Boolean); - if (definitions.length === 0) { - throw new Error( - "[@octokit/auth-action] `GITHUB_TOKEN` variable is not set. It must be set on either `env:` or `with:`. See https://github.com/octokit/auth-action.js#createactionauth" - ); + if (inClass) { + cs = pattern.substr(classStart + 1); + sp = this.parse(cs, SUBPARSE); + re = re.substr(0, reClassStart) + "\\[" + sp[0]; + hasMagic = hasMagic || sp[1]; } - if (definitions.length > 1) { - throw new Error( - "[@octokit/auth-action] The token variable is specified more than once. Use either `with.token`, `with.GITHUB_TOKEN`, or `env.GITHUB_TOKEN`. See https://github.com/octokit/auth-action.js#createactionauth" - ); + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length); + this.debug("setting tail", re, pl); + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function(_, $1, $2) { + if (!$2) { + $2 = "\\"; + } + return $1 + $1 + $2 + "|"; + }); + this.debug("tail=%j\n %s", tail, tail, pl, re); + var t2 = pl.type === "*" ? star3 : pl.type === "?" ? qmark3 : "\\" + pl.type; + hasMagic = true; + re = re.slice(0, pl.reStart) + t2 + "\\(" + tail; } - const token = definitions.pop(); - return (0, import_auth_token2.createTokenAuth)(token); - }; - } -}); - -// node_modules/web-streams-polyfill/dist/ponyfill.es2018.js -var require_ponyfill_es2018 = __commonJS({ - "node_modules/web-streams-polyfill/dist/ponyfill.es2018.js"(exports2, module2) { - "use strict"; - (function(global2, factory) { - typeof exports2 === "object" && typeof module2 !== "undefined" ? factory(exports2) : typeof define === "function" && define.amd ? define(["exports"], factory) : (global2 = typeof globalThis !== "undefined" ? 
globalThis : global2 || self, factory(global2.WebStreamsPolyfill = {})); - })(exports2, function(exports3) { - "use strict"; - function noop3() { - return void 0; + clearStateChar(); + if (escaping) { + re += "\\\\"; } - function typeIsObject(x2) { - return typeof x2 === "object" && x2 !== null || typeof x2 === "function"; + var addPatternStart2 = false; + switch (re.charAt(0)) { + case "[": + case ".": + case "(": + addPatternStart2 = true; } - const rethrowAssertionErrorRejection = noop3; - function setFunctionName(fn, name) { - try { - Object.defineProperty(fn, "name", { - value: name, - configurable: true - }); - } catch (_a3) { + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n]; + var nlBefore = re.slice(0, nl.reStart); + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8); + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd); + var nlAfter = re.slice(nl.reEnd); + nlLast += nlAfter; + var openParensBefore = nlBefore.split("(").length - 1; + var cleanAfter = nlAfter; + for (i2 = 0; i2 < openParensBefore; i2++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, ""); } + nlAfter = cleanAfter; + var dollar = ""; + if (nlAfter === "" && isSub !== SUBPARSE) { + dollar = "$"; + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast; + re = newRe; } - const originalPromise = Promise; - const originalPromiseThen = Promise.prototype.then; - const originalPromiseReject = Promise.reject.bind(originalPromise); - function newPromise(executor) { - return new originalPromise(executor); - } - function promiseResolvedWith(value) { - return newPromise((resolve) => resolve(value)); + if (re !== "" && hasMagic) { + re = "(?=.)" + re; } - function promiseRejectedWith(reason) { - return originalPromiseReject(reason); + if (addPatternStart2) { + re = patternStart + re; } - function PerformPromiseThen(promise, onFulfilled, onRejected) { - return originalPromiseThen.call(promise, onFulfilled, onRejected); + if (isSub === SUBPARSE) { + return [re, hasMagic]; } - function uponPromise(promise, onFulfilled, onRejected) { - PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), void 0, rethrowAssertionErrorRejection); + if (!hasMagic) { + return globUnescape(pattern); } - function uponFulfillment(promise, onFulfilled) { - uponPromise(promise, onFulfilled); + var flags = options.nocase ? "i" : ""; + try { + var regExp = new RegExp("^" + re + "$", flags); + } catch (er) { + return new RegExp("$."); } - function uponRejection(promise, onRejected) { - uponPromise(promise, void 0, onRejected); + regExp._glob = pattern; + regExp._src = re; + return regExp; + } + minimatch2.makeRe = function(pattern, options) { + return new Minimatch2(pattern, options || {}).makeRe(); + }; + Minimatch2.prototype.makeRe = makeRe2; + function makeRe2() { + if (this.regexp || this.regexp === false) return this.regexp; + var set2 = this.set; + if (!set2.length) { + this.regexp = false; + return this.regexp; } - function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) { - return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler); + var options = this.options; + var twoStar = options.noglobstar ? star3 : options.dot ? twoStarDot2 : twoStarNoDot2; + var flags = options.nocase ? "i" : ""; + var re = set2.map(function(pattern) { + return pattern.map(function(p) { + return p === GLOBSTAR2 ? twoStar : typeof p === "string" ? regExpEscape3(p) : p._src; + }).join("\\/"); + }).join("|"); + re = "^(?:" + re + ")$"; + if (this.negate) re = "^(?!" 
+ re + ").*$"; + try { + this.regexp = new RegExp(re, flags); + } catch (ex) { + this.regexp = false; } - function setPromiseIsHandledToTrue(promise) { - PerformPromiseThen(promise, void 0, rethrowAssertionErrorRejection); + return this.regexp; + } + minimatch2.match = function(list, pattern, options) { + options = options || {}; + var mm = new Minimatch2(pattern, options); + list = list.filter(function(f3) { + return mm.match(f3); + }); + if (mm.options.nonull && !list.length) { + list.push(pattern); } - let _queueMicrotask = (callback) => { - if (typeof queueMicrotask === "function") { - _queueMicrotask = queueMicrotask; - } else { - const resolvedPromise = promiseResolvedWith(void 0); - _queueMicrotask = (cb) => PerformPromiseThen(resolvedPromise, cb); - } - return _queueMicrotask(callback); - }; - function reflectCall(F2, V, args) { - if (typeof F2 !== "function") { - throw new TypeError("Argument is not a function"); - } - return Function.prototype.apply.call(F2, V, args); + return list; + }; + Minimatch2.prototype.match = function match2(f3, partial) { + if (typeof partial === "undefined") partial = this.partial; + this.debug("match", f3, this.pattern); + if (this.comment) return false; + if (this.empty) return f3 === ""; + if (f3 === "/" && partial) return true; + var options = this.options; + if (path2.sep !== "/") { + f3 = f3.split(path2.sep).join("/"); } - function promiseCall(F2, V, args) { - try { - return promiseResolvedWith(reflectCall(F2, V, args)); - } catch (value) { - return promiseRejectedWith(value); - } + f3 = f3.split(slashSplit); + this.debug(this.pattern, "split", f3); + var set2 = this.set; + this.debug(this.pattern, "set", set2); + var filename; + var i2; + for (i2 = f3.length - 1; i2 >= 0; i2--) { + filename = f3[i2]; + if (filename) break; } - const QUEUE_MAX_ARRAY_SIZE = 16384; - class SimpleQueue { - constructor() { - this._cursor = 0; - this._size = 0; - this._front = { - _elements: [], - _next: void 0 - }; - this._back = this._front; - this._cursor = 0; - this._size = 0; + for (i2 = 0; i2 < set2.length; i2++) { + var pattern = set2[i2]; + var file = f3; + if (options.matchBase && pattern.length === 1) { + file = [filename]; } - get length() { - return this._size; + var hit = this.matchOne(file, pattern, partial); + if (hit) { + if (options.flipNegate) return true; + return !this.negate; } - // For exception safety, this method is structured in order: - // 1. Read state - // 2. Calculate required state mutations - // 3. 
Perform state mutations - push(element) { - const oldBack = this._back; - let newBack = oldBack; - if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) { - newBack = { - _elements: [], - _next: void 0 - }; - } - oldBack._elements.push(element); - if (newBack !== oldBack) { - this._back = newBack; - oldBack._next = newBack; + } + if (options.flipNegate) return false; + return this.negate; + }; + Minimatch2.prototype.matchOne = function(file, pattern, partial) { + var options = this.options; + this.debug( + "matchOne", + { "this": this, file, pattern } + ); + this.debug("matchOne", file.length, pattern.length); + for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) { + this.debug("matchOne loop"); + var p = pattern[pi]; + var f3 = file[fi]; + this.debug(pattern, p, f3); + if (p === false) return false; + if (p === GLOBSTAR2) { + this.debug("GLOBSTAR", [pattern, p, f3]); + var fr = fi; + var pr = pi + 1; + if (pr === pl) { + this.debug("** at the end"); + for (; fi < fl; fi++) { + if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".") return false; + } + return true; } - ++this._size; - } - // Like push(), shift() follows the read -> calculate -> mutate pattern for - // exception safety. - shift() { - const oldFront = this._front; - let newFront = oldFront; - const oldCursor = this._cursor; - let newCursor = oldCursor + 1; - const elements = oldFront._elements; - const element = elements[oldCursor]; - if (newCursor === QUEUE_MAX_ARRAY_SIZE) { - newFront = oldFront._next; - newCursor = 0; + while (fr < fl) { + var swallowee = file[fr]; + this.debug("\nglobstar while", file, fr, pattern, pr, swallowee); + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug("globstar found match!", fr, fl, swallowee); + return true; + } else { + if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") { + this.debug("dot detected!", file, fr, pattern, pr); + break; + } + this.debug("globstar swallow a segment, and continue"); + fr++; + } } - --this._size; - this._cursor = newCursor; - if (oldFront !== newFront) { - this._front = newFront; + if (partial) { + this.debug("\n>>> no match, partial?", file, fr, pattern, pr); + if (fr === fl) return true; } - elements[oldCursor] = void 0; - return element; + return false; } - // The tricky thing about forEach() is that it can be called - // re-entrantly. The queue may be mutated inside the callback. It is easy to - // see that push() within the callback has no negative effects since the end - // of the queue is checked for on every iteration. If shift() is called - // repeatedly within the callback then the next iteration may return an - // element that has been removed. In this case the callback will be called - // with undefined values until we either "catch up" with elements that still - // exist or reach the back of the queue. - forEach(callback) { - let i2 = this._cursor; - let node = this._front; - let elements = node._elements; - while (i2 !== elements.length || node._next !== void 0) { - if (i2 === elements.length) { - node = node._next; - elements = node._elements; - i2 = 0; - if (elements.length === 0) { - break; - } - } - callback(elements[i2]); - ++i2; - } - } - // Return the element that would be returned if shift() was called now, - // without modifying the queue. 
- peek() { - const front = this._front; - const cursor = this._cursor; - return front._elements[cursor]; - } - } - const AbortSteps = Symbol("[[AbortSteps]]"); - const ErrorSteps = Symbol("[[ErrorSteps]]"); - const CancelSteps = Symbol("[[CancelSteps]]"); - const PullSteps = Symbol("[[PullSteps]]"); - const ReleaseSteps = Symbol("[[ReleaseSteps]]"); - function ReadableStreamReaderGenericInitialize(reader, stream) { - reader._ownerReadableStream = stream; - stream._reader = reader; - if (stream._state === "readable") { - defaultReaderClosedPromiseInitialize(reader); - } else if (stream._state === "closed") { - defaultReaderClosedPromiseInitializeAsResolved(reader); - } else { - defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError); - } - } - function ReadableStreamReaderGenericCancel(reader, reason) { - const stream = reader._ownerReadableStream; - return ReadableStreamCancel(stream, reason); - } - function ReadableStreamReaderGenericRelease(reader) { - const stream = reader._ownerReadableStream; - if (stream._state === "readable") { - defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); + var hit; + if (typeof p === "string") { + hit = f3 === p; + this.debug("string match", p, f3, hit); } else { - defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); - } - stream._readableStreamController[ReleaseSteps](); - stream._reader = void 0; - reader._ownerReadableStream = void 0; - } - function readerLockException(name) { - return new TypeError("Cannot " + name + " a stream using a released reader"); - } - function defaultReaderClosedPromiseInitialize(reader) { - reader._closedPromise = newPromise((resolve, reject) => { - reader._closedPromise_resolve = resolve; - reader._closedPromise_reject = reject; - }); - } - function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) { - defaultReaderClosedPromiseInitialize(reader); - defaultReaderClosedPromiseReject(reader, reason); - } - function defaultReaderClosedPromiseInitializeAsResolved(reader) { - defaultReaderClosedPromiseInitialize(reader); - defaultReaderClosedPromiseResolve(reader); - } - function defaultReaderClosedPromiseReject(reader, reason) { - if (reader._closedPromise_reject === void 0) { - return; + hit = f3.match(p); + this.debug("pattern match", p, f3, hit); } - setPromiseIsHandledToTrue(reader._closedPromise); - reader._closedPromise_reject(reason); - reader._closedPromise_resolve = void 0; - reader._closedPromise_reject = void 0; + if (!hit) return false; } - function defaultReaderClosedPromiseResetToRejected(reader, reason) { - defaultReaderClosedPromiseInitializeAsRejected(reader, reason); + if (fi === fl && pi === pl) { + return true; + } else if (fi === fl) { + return partial; + } else if (pi === pl) { + return fi === fl - 1 && file[fi] === ""; } - function defaultReaderClosedPromiseResolve(reader) { - if (reader._closedPromise_resolve === void 0) { - return; + throw new Error("wtf?"); + }; + function globUnescape(s2) { + return s2.replace(/\\(.)/g, "$1"); + } + function regExpEscape3(s2) { + return s2.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"); + } + } +}); + +// node_modules/inherits/inherits_browser.js +var require_inherits_browser = __commonJS({ + "node_modules/inherits/inherits_browser.js"(exports2, module2) { + "use strict"; + if (typeof Object.create === "function") { + module2.exports = function 
inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor; + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }); } - reader._closedPromise_resolve(void 0); - reader._closedPromise_resolve = void 0; - reader._closedPromise_reject = void 0; - } - const NumberIsFinite = Number.isFinite || function(x2) { - return typeof x2 === "number" && isFinite(x2); - }; - const MathTrunc = Math.trunc || function(v) { - return v < 0 ? Math.ceil(v) : Math.floor(v); }; - function isDictionary(x2) { - return typeof x2 === "object" || typeof x2 === "function"; - } - function assertDictionary(obj, context2) { - if (obj !== void 0 && !isDictionary(obj)) { - throw new TypeError(`${context2} is not an object.`); - } - } - function assertFunction(x2, context2) { - if (typeof x2 !== "function") { - throw new TypeError(`${context2} is not a function.`); + } else { + module2.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor; + var TempCtor = function() { + }; + TempCtor.prototype = superCtor.prototype; + ctor.prototype = new TempCtor(); + ctor.prototype.constructor = ctor; } + }; + } + } +}); + +// node_modules/inherits/inherits.js +var require_inherits = __commonJS({ + "node_modules/inherits/inherits.js"(exports2, module2) { + "use strict"; + try { + util = require("util"); + if (typeof util.inherits !== "function") throw ""; + module2.exports = util.inherits; + } catch (e2) { + module2.exports = require_inherits_browser(); + } + var util; + } +}); + +// node_modules/path-is-absolute/index.js +var require_path_is_absolute = __commonJS({ + "node_modules/path-is-absolute/index.js"(exports2, module2) { + "use strict"; + function posix(path2) { + return path2.charAt(0) === "/"; + } + function win32(path2) { + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path2); + var device = result[1] || ""; + var isUnc = Boolean(device && device.charAt(1) !== ":"); + return Boolean(result[2] || isUnc); + } + module2.exports = process.platform === "win32" ? 
win32 : posix; + module2.exports.posix = posix; + module2.exports.win32 = win32; + } +}); + +// node_modules/glob/common.js +var require_common3 = __commonJS({ + "node_modules/glob/common.js"(exports2) { + "use strict"; + exports2.setopts = setopts; + exports2.ownProp = ownProp; + exports2.makeAbs = makeAbs; + exports2.finish = finish; + exports2.mark = mark; + exports2.isIgnored = isIgnored; + exports2.childrenIgnored = childrenIgnored; + function ownProp(obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field); + } + var fs2 = require("fs"); + var path2 = require("path"); + var minimatch2 = require_minimatch(); + var isAbsolute = require_path_is_absolute(); + var Minimatch2 = minimatch2.Minimatch; + function alphasort(a, b) { + return a.localeCompare(b, "en"); + } + function setupIgnores(self2, options) { + self2.ignore = options.ignore || []; + if (!Array.isArray(self2.ignore)) + self2.ignore = [self2.ignore]; + if (self2.ignore.length) { + self2.ignore = self2.ignore.map(ignoreMap); } - function isObject2(x2) { - return typeof x2 === "object" && x2 !== null || typeof x2 === "function"; + } + function ignoreMap(pattern) { + var gmatcher = null; + if (pattern.slice(-3) === "/**") { + var gpattern = pattern.replace(/(\/\*\*)+$/, ""); + gmatcher = new Minimatch2(gpattern, { dot: true }); } - function assertObject(x2, context2) { - if (!isObject2(x2)) { - throw new TypeError(`${context2} is not an object.`); + return { + matcher: new Minimatch2(pattern, { dot: true }), + gmatcher + }; + } + function setopts(self2, pattern, options) { + if (!options) + options = {}; + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar"); } + pattern = "**/" + pattern; } - function assertRequiredArgument(x2, position, context2) { - if (x2 === void 0) { - throw new TypeError(`Parameter ${position} is required in '${context2}'.`); - } + self2.silent = !!options.silent; + self2.pattern = pattern; + self2.strict = options.strict !== false; + self2.realpath = !!options.realpath; + self2.realpathCache = options.realpathCache || /* @__PURE__ */ Object.create(null); + self2.follow = !!options.follow; + self2.dot = !!options.dot; + self2.mark = !!options.mark; + self2.nodir = !!options.nodir; + if (self2.nodir) + self2.mark = true; + self2.sync = !!options.sync; + self2.nounique = !!options.nounique; + self2.nonull = !!options.nonull; + self2.nosort = !!options.nosort; + self2.nocase = !!options.nocase; + self2.stat = !!options.stat; + self2.noprocess = !!options.noprocess; + self2.absolute = !!options.absolute; + self2.fs = options.fs || fs2; + self2.maxLength = options.maxLength || Infinity; + self2.cache = options.cache || /* @__PURE__ */ Object.create(null); + self2.statCache = options.statCache || /* @__PURE__ */ Object.create(null); + self2.symlinks = options.symlinks || /* @__PURE__ */ Object.create(null); + setupIgnores(self2, options); + self2.changedCwd = false; + var cwd = process.cwd(); + if (!ownProp(options, "cwd")) + self2.cwd = cwd; + else { + self2.cwd = path2.resolve(options.cwd); + self2.changedCwd = self2.cwd !== cwd; } - function assertRequiredField(x2, field, context2) { - if (x2 === void 0) { - throw new TypeError(`${field} is required in '${context2}'.`); + self2.root = options.root || path2.resolve(self2.cwd, "/"); + self2.root = path2.resolve(self2.root); + if (process.platform === "win32") + self2.root = self2.root.replace(/\\/g, "/"); + self2.cwdAbs = isAbsolute(self2.cwd) ? 
self2.cwd : makeAbs(self2, self2.cwd); + if (process.platform === "win32") + self2.cwdAbs = self2.cwdAbs.replace(/\\/g, "/"); + self2.nomount = !!options.nomount; + options.nonegate = true; + options.nocomment = true; + options.allowWindowsEscape = false; + self2.minimatch = new Minimatch2(pattern, options); + self2.options = self2.minimatch.options; + } + function finish(self2) { + var nou = self2.nounique; + var all = nou ? [] : /* @__PURE__ */ Object.create(null); + for (var i2 = 0, l = self2.matches.length; i2 < l; i2++) { + var matches = self2.matches[i2]; + if (!matches || Object.keys(matches).length === 0) { + if (self2.nonull) { + var literal = self2.minimatch.globSet[i2]; + if (nou) + all.push(literal); + else + all[literal] = true; + } + } else { + var m2 = Object.keys(matches); + if (nou) + all.push.apply(all, m2); + else + m2.forEach(function(m3) { + all[m3] = true; + }); } } - function convertUnrestrictedDouble(value) { - return Number(value); - } - function censorNegativeZero(x2) { - return x2 === 0 ? 0 : x2; - } - function integerPart(x2) { - return censorNegativeZero(MathTrunc(x2)); - } - function convertUnsignedLongLongWithEnforceRange(value, context2) { - const lowerBound = 0; - const upperBound = Number.MAX_SAFE_INTEGER; - let x2 = Number(value); - x2 = censorNegativeZero(x2); - if (!NumberIsFinite(x2)) { - throw new TypeError(`${context2} is not a finite number`); - } - x2 = integerPart(x2); - if (x2 < lowerBound || x2 > upperBound) { - throw new TypeError(`${context2} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`); + if (!nou) + all = Object.keys(all); + if (!self2.nosort) + all = all.sort(alphasort); + if (self2.mark) { + for (var i2 = 0; i2 < all.length; i2++) { + all[i2] = self2._mark(all[i2]); } - if (!NumberIsFinite(x2) || x2 === 0) { - return 0; + if (self2.nodir) { + all = all.filter(function(e2) { + var notDir = !/\/$/.test(e2); + var c = self2.cache[e2] || self2.cache[makeAbs(self2, e2)]; + if (notDir && c) + notDir = c !== "DIR" && !Array.isArray(c); + return notDir; + }); } - return x2; } - function assertReadableStream(x2, context2) { - if (!IsReadableStream(x2)) { - throw new TypeError(`${context2} is not a ReadableStream.`); + if (self2.ignore.length) + all = all.filter(function(m3) { + return !isIgnored(self2, m3); + }); + self2.found = all; + } + function mark(self2, p) { + var abs = makeAbs(self2, p); + var c = self2.cache[abs]; + var m2 = p; + if (c) { + var isDir = c === "DIR" || Array.isArray(c); + var slash = p.slice(-1) === "/"; + if (isDir && !slash) + m2 += "/"; + else if (!isDir && slash) + m2 = m2.slice(0, -1); + if (m2 !== p) { + var mabs = makeAbs(self2, m2); + self2.statCache[mabs] = self2.statCache[abs]; + self2.cache[mabs] = self2.cache[abs]; } } - function AcquireReadableStreamDefaultReader(stream) { - return new ReadableStreamDefaultReader(stream); + return m2; + } + function makeAbs(self2, f3) { + var abs = f3; + if (f3.charAt(0) === "/") { + abs = path2.join(self2.root, f3); + } else if (isAbsolute(f3) || f3 === "") { + abs = f3; + } else if (self2.changedCwd) { + abs = path2.resolve(self2.cwd, f3); + } else { + abs = path2.resolve(f3); } - function ReadableStreamAddReadRequest(stream, readRequest) { - stream._reader._readRequests.push(readRequest); + if (process.platform === "win32") + abs = abs.replace(/\\/g, "/"); + return abs; + } + function isIgnored(self2, path3) { + if (!self2.ignore.length) + return false; + return self2.ignore.some(function(item) { + return item.matcher.match(path3) || 
!!(item.gmatcher && item.gmatcher.match(path3)); + }); + } + function childrenIgnored(self2, path3) { + if (!self2.ignore.length) + return false; + return self2.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path3)); + }); + } + } +}); + +// node_modules/glob/sync.js +var require_sync = __commonJS({ + "node_modules/glob/sync.js"(exports2, module2) { + "use strict"; + module2.exports = globSync; + globSync.GlobSync = GlobSync; + var rp = require_fs(); + var minimatch2 = require_minimatch(); + var Minimatch2 = minimatch2.Minimatch; + var Glob = require_glob().Glob; + var util = require("util"); + var path2 = require("path"); + var assert = require("assert"); + var isAbsolute = require_path_is_absolute(); + var common = require_common3(); + var setopts = common.setopts; + var ownProp = common.ownProp; + var childrenIgnored = common.childrenIgnored; + var isIgnored = common.isIgnored; + function globSync(pattern, options) { + if (typeof options === "function" || arguments.length === 3) + throw new TypeError("callback provided to sync glob\nSee: https://github.com/isaacs/node-glob/issues/167"); + return new GlobSync(pattern, options).found; + } + function GlobSync(pattern, options) { + if (!pattern) + throw new Error("must provide pattern"); + if (typeof options === "function" || arguments.length === 3) + throw new TypeError("callback provided to sync glob\nSee: https://github.com/isaacs/node-glob/issues/167"); + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options); + setopts(this, pattern, options); + if (this.noprocess) + return this; + var n = this.minimatch.set.length; + this.matches = new Array(n); + for (var i2 = 0; i2 < n; i2++) { + this._process(this.minimatch.set[i2], i2, false); } - function ReadableStreamFulfillReadRequest(stream, chunk, done) { - const reader = stream._reader; - const readRequest = reader._readRequests.shift(); - if (done) { - readRequest._closeSteps(); - } else { - readRequest._chunkSteps(chunk); - } + this._finish(); + } + GlobSync.prototype._finish = function() { + assert.ok(this instanceof GlobSync); + if (this.realpath) { + var self2 = this; + this.matches.forEach(function(matchset, index) { + var set2 = self2.matches[index] = /* @__PURE__ */ Object.create(null); + for (var p in matchset) { + try { + p = self2._makeAbs(p); + var real = rp.realpathSync(p, self2.realpathCache); + set2[real] = true; + } catch (er) { + if (er.syscall === "stat") + set2[self2._makeAbs(p)] = true; + else + throw er; + } + } + }); } - function ReadableStreamGetNumReadRequests(stream) { - return stream._reader._readRequests.length; + common.finish(this); + }; + GlobSync.prototype._process = function(pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync); + var n = 0; + while (typeof pattern[n] === "string") { + n++; } - function ReadableStreamHasDefaultReader(stream) { - const reader = stream._reader; - if (reader === void 0) { - return false; - } - if (!IsReadableStreamDefaultReader(reader)) { - return false; - } - return true; + var prefix; + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join("/"), index); + return; + case 0: + prefix = null; + break; + default: + prefix = pattern.slice(0, n).join("/"); + break; } - class ReadableStreamDefaultReader { - constructor(stream) { - assertRequiredArgument(stream, 1, "ReadableStreamDefaultReader"); - assertReadableStream(stream, "First parameter"); - if (IsReadableStreamLocked(stream)) { - throw new TypeError("This 
stream has already been locked for exclusive reading by another reader"); - } - ReadableStreamReaderGenericInitialize(this, stream); - this._readRequests = new SimpleQueue(); - } - /** - * Returns a promise that will be fulfilled when the stream becomes closed, - * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing. - */ - get closed() { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException("closed")); - } - return this._closedPromise; - } - /** - * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}. - */ - cancel(reason = void 0) { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException("cancel")); - } - if (this._ownerReadableStream === void 0) { - return promiseRejectedWith(readerLockException("cancel")); - } - return ReadableStreamReaderGenericCancel(this, reason); - } - /** - * Returns a promise that allows access to the next chunk from the stream's internal queue, if available. - * - * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source. - */ - read() { - if (!IsReadableStreamDefaultReader(this)) { - return promiseRejectedWith(defaultReaderBrandCheckException("read")); - } - if (this._ownerReadableStream === void 0) { - return promiseRejectedWith(readerLockException("read from")); + var remain = pattern.slice(n); + var read; + if (prefix === null) + read = "."; + else if (isAbsolute(prefix) || isAbsolute(pattern.map(function(p) { + return typeof p === "string" ? p : "[*]"; + }).join("/"))) { + if (!prefix || !isAbsolute(prefix)) + prefix = "/" + prefix; + read = prefix; + } else + read = prefix; + var abs = this._makeAbs(read); + if (childrenIgnored(this, read)) + return; + var isGlobStar = remain[0] === minimatch2.GLOBSTAR; + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar); + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar); + }; + GlobSync.prototype._processReaddir = function(prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar); + if (!entries) + return; + var pn = remain[0]; + var negate = !!this.minimatch.negate; + var rawGlob = pn._glob; + var dotOk = this.dot || rawGlob.charAt(0) === "."; + var matchedEntries = []; + for (var i2 = 0; i2 < entries.length; i2++) { + var e2 = entries[i2]; + if (e2.charAt(0) !== "." || dotOk) { + var m2; + if (negate && !prefix) { + m2 = !e2.match(pn); + } else { + m2 = e2.match(pn); } - let resolvePromise; - let rejectPromise; - const promise = newPromise((resolve, reject) => { - resolvePromise = resolve; - rejectPromise = reject; - }); - const readRequest = { - _chunkSteps: (chunk) => resolvePromise({ value: chunk, done: false }), - _closeSteps: () => resolvePromise({ value: void 0, done: true }), - _errorSteps: (e2) => rejectPromise(e2) - }; - ReadableStreamDefaultReaderRead(this, readRequest); - return promise; + if (m2) + matchedEntries.push(e2); } - /** - * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active. - * If the associated stream is errored when the lock is released, the reader will appear errored in the same way - * from now on; otherwise, the reader will appear closed. 
- * - * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by - * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to - * do so will throw a `TypeError` and leave the reader locked to the stream. - */ - releaseLock() { - if (!IsReadableStreamDefaultReader(this)) { - throw defaultReaderBrandCheckException("releaseLock"); + } + var len = matchedEntries.length; + if (len === 0) + return; + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = /* @__PURE__ */ Object.create(null); + for (var i2 = 0; i2 < len; i2++) { + var e2 = matchedEntries[i2]; + if (prefix) { + if (prefix.slice(-1) !== "/") + e2 = prefix + "/" + e2; + else + e2 = prefix + e2; } - if (this._ownerReadableStream === void 0) { - return; + if (e2.charAt(0) === "/" && !this.nomount) { + e2 = path2.join(this.root, e2); } - ReadableStreamDefaultReaderRelease(this); + this._emitMatch(index, e2); } + return; } - Object.defineProperties(ReadableStreamDefaultReader.prototype, { - cancel: { enumerable: true }, - read: { enumerable: true }, - releaseLock: { enumerable: true }, - closed: { enumerable: true } - }); - setFunctionName(ReadableStreamDefaultReader.prototype.cancel, "cancel"); - setFunctionName(ReadableStreamDefaultReader.prototype.read, "read"); - setFunctionName(ReadableStreamDefaultReader.prototype.releaseLock, "releaseLock"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableStreamDefaultReader.prototype, Symbol.toStringTag, { - value: "ReadableStreamDefaultReader", - configurable: true - }); + remain.shift(); + for (var i2 = 0; i2 < len; i2++) { + var e2 = matchedEntries[i2]; + var newPattern; + if (prefix) + newPattern = [prefix, e2]; + else + newPattern = [e2]; + this._process(newPattern.concat(remain), index, inGlobStar); } - function IsReadableStreamDefaultReader(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_readRequests")) { - return false; - } - return x2 instanceof ReadableStreamDefaultReader; + }; + GlobSync.prototype._emitMatch = function(index, e2) { + if (isIgnored(this, e2)) + return; + var abs = this._makeAbs(e2); + if (this.mark) + e2 = this._mark(e2); + if (this.absolute) { + e2 = abs; } - function ReadableStreamDefaultReaderRead(reader, readRequest) { - const stream = reader._ownerReadableStream; - stream._disturbed = true; - if (stream._state === "closed") { - readRequest._closeSteps(); - } else if (stream._state === "errored") { - readRequest._errorSteps(stream._storedError); - } else { - stream._readableStreamController[PullSteps](readRequest); - } + if (this.matches[index][e2]) + return; + if (this.nodir) { + var c = this.cache[abs]; + if (c === "DIR" || Array.isArray(c)) + return; } - function ReadableStreamDefaultReaderRelease(reader) { - ReadableStreamReaderGenericRelease(reader); - const e2 = new TypeError("Reader was released"); - ReadableStreamDefaultReaderErrorReadRequests(reader, e2); + this.matches[index][e2] = true; + if (this.stat) + this._stat(e2); + }; + GlobSync.prototype._readdirInGlobStar = function(abs) { + if (this.follow) + return this._readdir(abs, false); + var entries; + var lstat; + var stat2; + try { + lstat = this.fs.lstatSync(abs); + } catch (er) { + if (er.code === "ENOENT") { + return null; + } } - function ReadableStreamDefaultReaderErrorReadRequests(reader, e2) { - const readRequests = reader._readRequests; - 
reader._readRequests = new SimpleQueue(); - readRequests.forEach((readRequest) => { - readRequest._errorSteps(e2); - }); + var isSym = lstat && lstat.isSymbolicLink(); + this.symlinks[abs] = isSym; + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = "FILE"; + else + entries = this._readdir(abs, false); + return entries; + }; + GlobSync.prototype._readdir = function(abs, inGlobStar) { + var entries; + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs); + if (ownProp(this.cache, abs)) { + var c = this.cache[abs]; + if (!c || c === "FILE") + return null; + if (Array.isArray(c)) + return c; } - function defaultReaderBrandCheckException(name) { - return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`); + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)); + } catch (er) { + this._readdirError(abs, er); + return null; } - const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { - }).prototype); - class ReadableStreamAsyncIteratorImpl { - constructor(reader, preventCancel) { - this._ongoingPromise = void 0; - this._isFinished = false; - this._reader = reader; - this._preventCancel = preventCancel; - } - next() { - const nextSteps = () => this._nextSteps(); - this._ongoingPromise = this._ongoingPromise ? transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) : nextSteps(); - return this._ongoingPromise; - } - return(value) { - const returnSteps = () => this._returnSteps(value); - return this._ongoingPromise ? transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) : returnSteps(); - } - _nextSteps() { - if (this._isFinished) { - return Promise.resolve({ value: void 0, done: true }); - } - const reader = this._reader; - let resolvePromise; - let rejectPromise; - const promise = newPromise((resolve, reject) => { - resolvePromise = resolve; - rejectPromise = reject; - }); - const readRequest = { - _chunkSteps: (chunk) => { - this._ongoingPromise = void 0; - _queueMicrotask(() => resolvePromise({ value: chunk, done: false })); - }, - _closeSteps: () => { - this._ongoingPromise = void 0; - this._isFinished = true; - ReadableStreamReaderGenericRelease(reader); - resolvePromise({ value: void 0, done: true }); - }, - _errorSteps: (reason) => { - this._ongoingPromise = void 0; - this._isFinished = true; - ReadableStreamReaderGenericRelease(reader); - rejectPromise(reason); - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); - return promise; - } - _returnSteps(value) { - if (this._isFinished) { - return Promise.resolve({ value, done: true }); - } - this._isFinished = true; - const reader = this._reader; - if (!this._preventCancel) { - const result = ReadableStreamReaderGenericCancel(reader, value); - ReadableStreamReaderGenericRelease(reader); - return transformPromiseWith(result, () => ({ value, done: true })); - } - ReadableStreamReaderGenericRelease(reader); - return promiseResolvedWith({ value, done: true }); + }; + GlobSync.prototype._readdirEntries = function(abs, entries) { + if (!this.mark && !this.stat) { + for (var i2 = 0; i2 < entries.length; i2++) { + var e2 = entries[i2]; + if (abs === "/") + e2 = abs + e2; + else + e2 = abs + "/" + e2; + this.cache[e2] = true; } } - const ReadableStreamAsyncIteratorPrototype = { - next() { - if (!IsReadableStreamAsyncIterator(this)) { - return promiseRejectedWith(streamAsyncIteratorBrandCheckException("next")); - } - return this._asyncIteratorImpl.next(); - 
}, - return(value) { - if (!IsReadableStreamAsyncIterator(this)) { - return promiseRejectedWith(streamAsyncIteratorBrandCheckException("return")); + this.cache[abs] = entries; + return entries; + }; + GlobSync.prototype._readdirError = function(f3, er) { + switch (er.code) { + case "ENOTSUP": + // https://github.com/isaacs/node-glob/issues/205 + case "ENOTDIR": + var abs = this._makeAbs(f3); + this.cache[abs] = "FILE"; + if (abs === this.cwdAbs) { + var error = new Error(er.code + " invalid cwd " + this.cwd); + error.path = this.cwd; + error.code = er.code; + throw error; } - return this._asyncIteratorImpl.return(value); - } - }; - Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype); - function AcquireReadableStreamAsyncIterator(stream, preventCancel) { - const reader = AcquireReadableStreamDefaultReader(stream); - const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel); - const iterator2 = Object.create(ReadableStreamAsyncIteratorPrototype); - iterator2._asyncIteratorImpl = impl; - return iterator2; - } - function IsReadableStreamAsyncIterator(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_asyncIteratorImpl")) { - return false; - } - try { - return x2._asyncIteratorImpl instanceof ReadableStreamAsyncIteratorImpl; - } catch (_a3) { - return false; - } - } - function streamAsyncIteratorBrandCheckException(name) { - return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`); - } - const NumberIsNaN = Number.isNaN || function(x2) { - return x2 !== x2; - }; - var _a2, _b, _c; - function CreateArrayFromList(elements) { - return elements.slice(); + break; + case "ENOENT": + // not terribly unusual + case "ELOOP": + case "ENAMETOOLONG": + case "UNKNOWN": + this.cache[this._makeAbs(f3)] = false; + break; + default: + this.cache[this._makeAbs(f3)] = false; + if (this.strict) + throw er; + if (!this.silent) + console.error("glob error", er); + break; } - function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) { - new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset); + }; + GlobSync.prototype._processGlobStar = function(prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar); + if (!entries) + return; + var remainWithoutGlobStar = remain.slice(1); + var gspref = prefix ? [prefix] : []; + var noGlobStar = gspref.concat(remainWithoutGlobStar); + this._process(noGlobStar, index, false); + var len = entries.length; + var isSym = this.symlinks[abs]; + if (isSym && inGlobStar) + return; + for (var i2 = 0; i2 < len; i2++) { + var e2 = entries[i2]; + if (e2.charAt(0) === "." 
&& !this.dot) + continue; + var instead = gspref.concat(entries[i2], remainWithoutGlobStar); + this._process(instead, index, true); + var below = gspref.concat(entries[i2], remain); + this._process(below, index, true); } - let TransferArrayBuffer = (O) => { - if (typeof O.transfer === "function") { - TransferArrayBuffer = (buffer) => buffer.transfer(); - } else if (typeof structuredClone === "function") { - TransferArrayBuffer = (buffer) => structuredClone(buffer, { transfer: [buffer] }); - } else { - TransferArrayBuffer = (buffer) => buffer; - } - return TransferArrayBuffer(O); - }; - let IsDetachedBuffer = (O) => { - if (typeof O.detached === "boolean") { - IsDetachedBuffer = (buffer) => buffer.detached; + }; + GlobSync.prototype._processSimple = function(prefix, index) { + var exists2 = this._stat(prefix); + if (!this.matches[index]) + this.matches[index] = /* @__PURE__ */ Object.create(null); + if (!exists2) + return; + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix); + if (prefix.charAt(0) === "/") { + prefix = path2.join(this.root, prefix); } else { - IsDetachedBuffer = (buffer) => buffer.byteLength === 0; - } - return IsDetachedBuffer(O); - }; - function ArrayBufferSlice(buffer, begin, end) { - if (buffer.slice) { - return buffer.slice(begin, end); - } - const length = end - begin; - const slice = new ArrayBuffer(length); - CopyDataBlockBytes(slice, 0, buffer, begin, length); - return slice; - } - function GetMethod(receiver, prop) { - const func = receiver[prop]; - if (func === void 0 || func === null) { - return void 0; - } - if (typeof func !== "function") { - throw new TypeError(`${String(prop)} is not a function`); + prefix = path2.resolve(this.root, prefix); + if (trail) + prefix += "/"; } - return func; } - function CreateAsyncFromSyncIterator(syncIteratorRecord) { - const syncIterable = { - [Symbol.iterator]: () => syncIteratorRecord.iterator - }; - const asyncIterator = async function* () { - return yield* syncIterable; - }(); - const nextMethod = asyncIterator.next; - return { iterator: asyncIterator, nextMethod, done: false }; + if (process.platform === "win32") + prefix = prefix.replace(/\\/g, "/"); + this._emitMatch(index, prefix); + }; + GlobSync.prototype._stat = function(f3) { + var abs = this._makeAbs(f3); + var needDir = f3.slice(-1) === "/"; + if (f3.length > this.maxLength) + return false; + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs]; + if (Array.isArray(c)) + c = "DIR"; + if (!needDir || c === "DIR") + return c; + if (needDir && c === "FILE") + return false; } - const SymbolAsyncIterator = (_c = (_a2 = Symbol.asyncIterator) !== null && _a2 !== void 0 ? _a2 : (_b = Symbol.for) === null || _b === void 0 ? void 0 : _b.call(Symbol, "Symbol.asyncIterator")) !== null && _c !== void 0 ? 
_c : "@@asyncIterator"; - function GetIterator(obj, hint = "sync", method) { - if (method === void 0) { - if (hint === "async") { - method = GetMethod(obj, SymbolAsyncIterator); - if (method === void 0) { - const syncMethod = GetMethod(obj, Symbol.iterator); - const syncIteratorRecord = GetIterator(obj, "sync", syncMethod); - return CreateAsyncFromSyncIterator(syncIteratorRecord); - } - } else { - method = GetMethod(obj, Symbol.iterator); + var exists2; + var stat2 = this.statCache[abs]; + if (!stat2) { + var lstat; + try { + lstat = this.fs.lstatSync(abs); + } catch (er) { + if (er && (er.code === "ENOENT" || er.code === "ENOTDIR")) { + this.statCache[abs] = false; + return false; } } - if (method === void 0) { - throw new TypeError("The object is not iterable"); - } - const iterator2 = reflectCall(method, obj, []); - if (!typeIsObject(iterator2)) { - throw new TypeError("The iterator method must return an object"); + if (lstat && lstat.isSymbolicLink()) { + try { + stat2 = this.fs.statSync(abs); + } catch (er) { + stat2 = lstat; + } + } else { + stat2 = lstat; } - const nextMethod = iterator2.next; - return { iterator: iterator2, nextMethod, done: false }; } - function IteratorNext(iteratorRecord) { - const result = reflectCall(iteratorRecord.nextMethod, iteratorRecord.iterator, []); - if (!typeIsObject(result)) { - throw new TypeError("The iterator.next() method must return an object"); - } - return result; - } - function IteratorComplete(iterResult) { - return Boolean(iterResult.done); - } - function IteratorValue(iterResult) { - return iterResult.value; + this.statCache[abs] = stat2; + var c = true; + if (stat2) + c = stat2.isDirectory() ? "DIR" : "FILE"; + this.cache[abs] = this.cache[abs] || c; + if (needDir && c === "FILE") + return false; + return c; + }; + GlobSync.prototype._mark = function(p) { + return common.mark(this, p); + }; + GlobSync.prototype._makeAbs = function(f3) { + return common.makeAbs(this, f3); + }; + } +}); + +// node_modules/inflight/inflight.js +var require_inflight = __commonJS({ + "node_modules/inflight/inflight.js"(exports2, module2) { + "use strict"; + var wrappy = require_wrappy(); + var reqs = /* @__PURE__ */ Object.create(null); + var once2 = require_once(); + module2.exports = wrappy(inflight); + function inflight(key, cb) { + if (reqs[key]) { + reqs[key].push(cb); + return null; + } else { + reqs[key] = [cb]; + return makeres(key); } - function IsNonNegativeNumber(v) { - if (typeof v !== "number") { - return false; - } - if (NumberIsNaN(v)) { - return false; - } - if (v < 0) { - return false; + } + function makeres(key) { + return once2(function RES() { + var cbs = reqs[key]; + var len = cbs.length; + var args = slice(arguments); + try { + for (var i2 = 0; i2 < len; i2++) { + cbs[i2].apply(null, args); + } + } finally { + if (cbs.length > len) { + cbs.splice(0, len); + process.nextTick(function() { + RES.apply(null, args); + }); + } else { + delete reqs[key]; + } } - return true; - } - function CloneAsUint8Array(O) { - const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength); - return new Uint8Array(buffer); + }); + } + function slice(args) { + var length = args.length; + var array = []; + for (var i2 = 0; i2 < length; i2++) array[i2] = args[i2]; + return array; + } + } +}); + +// node_modules/glob/glob.js +var require_glob = __commonJS({ + "node_modules/glob/glob.js"(exports2, module2) { + "use strict"; + module2.exports = glob; + var rp = require_fs(); + var minimatch2 = require_minimatch(); + var Minimatch2 = 
minimatch2.Minimatch; + var inherits = require_inherits(); + var EE = require("events").EventEmitter; + var path2 = require("path"); + var assert = require("assert"); + var isAbsolute = require_path_is_absolute(); + var globSync = require_sync(); + var common = require_common3(); + var setopts = common.setopts; + var ownProp = common.ownProp; + var inflight = require_inflight(); + var util = require("util"); + var childrenIgnored = common.childrenIgnored; + var isIgnored = common.isIgnored; + var once2 = require_once(); + function glob(pattern, options, cb) { + if (typeof options === "function") cb = options, options = {}; + if (!options) options = {}; + if (options.sync) { + if (cb) + throw new TypeError("callback provided to sync glob"); + return globSync(pattern, options); } - function DequeueValue(container) { - const pair = container._queue.shift(); - container._queueTotalSize -= pair.size; - if (container._queueTotalSize < 0) { - container._queueTotalSize = 0; - } - return pair.value; + return new Glob(pattern, options, cb); + } + glob.sync = globSync; + var GlobSync = glob.GlobSync = globSync.GlobSync; + glob.glob = glob; + function extend(origin, add) { + if (add === null || typeof add !== "object") { + return origin; } - function EnqueueValueWithSize(container, value, size) { - if (!IsNonNegativeNumber(size) || size === Infinity) { - throw new RangeError("Size must be a finite, non-NaN, non-negative number."); - } - container._queue.push({ value, size }); - container._queueTotalSize += size; + var keys = Object.keys(add); + var i2 = keys.length; + while (i2--) { + origin[keys[i2]] = add[keys[i2]]; } - function PeekQueueValue(container) { - const pair = container._queue.peek(); - return pair.value; + return origin; + } + glob.hasMagic = function(pattern, options_) { + var options = extend({}, options_); + options.noprocess = true; + var g = new Glob(pattern, options); + var set2 = g.minimatch.set; + if (!pattern) + return false; + if (set2.length > 1) + return true; + for (var j = 0; j < set2[0].length; j++) { + if (typeof set2[0][j] !== "string") + return true; } - function ResetQueue(container) { - container._queue = new SimpleQueue(); - container._queueTotalSize = 0; + return false; + }; + glob.Glob = Glob; + inherits(Glob, EE); + function Glob(pattern, options, cb) { + if (typeof options === "function") { + cb = options; + options = null; } - function isDataViewConstructor(ctor) { - return ctor === DataView; + if (options && options.sync) { + if (cb) + throw new TypeError("callback provided to sync glob"); + return new GlobSync(pattern, options); } - function isDataView(view) { - return isDataViewConstructor(view.constructor); + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb); + setopts(this, pattern, options); + this._didRealPath = false; + var n = this.minimatch.set.length; + this.matches = new Array(n); + if (typeof cb === "function") { + cb = once2(cb); + this.on("error", cb); + this.on("end", function(matches) { + cb(null, matches); + }); } - function arrayBufferViewElementSize(ctor) { - if (isDataViewConstructor(ctor)) { - return 1; - } - return ctor.BYTES_PER_ELEMENT; + var self2 = this; + this._processing = 0; + this._emitQueue = []; + this._processQueue = []; + this.paused = false; + if (this.noprocess) + return this; + if (n === 0) + return done(); + var sync = true; + for (var i2 = 0; i2 < n; i2++) { + this._process(this.minimatch.set[i2], i2, false, done); } - class ReadableStreamBYOBRequest { - constructor() { - throw new TypeError("Illegal 
constructor"); - } - /** - * Returns the view for writing in to, or `null` if the BYOB request has already been responded to. - */ - get view() { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException("view"); - } - return this._view; - } - respond(bytesWritten) { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException("respond"); - } - assertRequiredArgument(bytesWritten, 1, "respond"); - bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, "First parameter"); - if (this._associatedReadableByteStreamController === void 0) { - throw new TypeError("This BYOB request has been invalidated"); - } - if (IsDetachedBuffer(this._view.buffer)) { - throw new TypeError(`The BYOB request's buffer has been detached and so cannot be used as a response`); - } - ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten); - } - respondWithNewView(view) { - if (!IsReadableStreamBYOBRequest(this)) { - throw byobRequestBrandCheckException("respondWithNewView"); - } - assertRequiredArgument(view, 1, "respondWithNewView"); - if (!ArrayBuffer.isView(view)) { - throw new TypeError("You can only respond with array buffer views"); - } - if (this._associatedReadableByteStreamController === void 0) { - throw new TypeError("This BYOB request has been invalidated"); - } - if (IsDetachedBuffer(view.buffer)) { - throw new TypeError("The given view's buffer has been detached and so cannot be used as a response"); + sync = false; + function done() { + --self2._processing; + if (self2._processing <= 0) { + if (sync) { + process.nextTick(function() { + self2._finish(); + }); + } else { + self2._finish(); } - ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view); } } - Object.defineProperties(ReadableStreamBYOBRequest.prototype, { - respond: { enumerable: true }, - respondWithNewView: { enumerable: true }, - view: { enumerable: true } - }); - setFunctionName(ReadableStreamBYOBRequest.prototype.respond, "respond"); - setFunctionName(ReadableStreamBYOBRequest.prototype.respondWithNewView, "respondWithNewView"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableStreamBYOBRequest.prototype, Symbol.toStringTag, { - value: "ReadableStreamBYOBRequest", - configurable: true + } + Glob.prototype._finish = function() { + assert(this instanceof Glob); + if (this.aborted) + return; + if (this.realpath && !this._didRealpath) + return this._realpath(); + common.finish(this); + this.emit("end", this.found); + }; + Glob.prototype._realpath = function() { + if (this._didRealpath) + return; + this._didRealpath = true; + var n = this.matches.length; + if (n === 0) + return this._finish(); + var self2 = this; + for (var i2 = 0; i2 < this.matches.length; i2++) + this._realpathSet(i2, next); + function next() { + if (--n === 0) + self2._finish(); + } + }; + Glob.prototype._realpathSet = function(index, cb) { + var matchset = this.matches[index]; + if (!matchset) + return cb(); + var found = Object.keys(matchset); + var self2 = this; + var n = found.length; + if (n === 0) + return cb(); + var set2 = this.matches[index] = /* @__PURE__ */ Object.create(null); + found.forEach(function(p, i2) { + p = self2._makeAbs(p); + rp.realpath(p, self2.realpathCache, function(er, real) { + if (!er) + set2[real] = true; + else if (er.syscall === "stat") + set2[p] = true; + else + self2.emit("error", er); + if (--n === 0) { + self2.matches[index] = set2; + cb(); + } }); + }); + }; + 
Glob.prototype._mark = function(p) { + return common.mark(this, p); + }; + Glob.prototype._makeAbs = function(f3) { + return common.makeAbs(this, f3); + }; + Glob.prototype.abort = function() { + this.aborted = true; + this.emit("abort"); + }; + Glob.prototype.pause = function() { + if (!this.paused) { + this.paused = true; + this.emit("pause"); } - class ReadableByteStreamController { - constructor() { - throw new TypeError("Illegal constructor"); - } - /** - * Returns the current BYOB pull request, or `null` if there isn't one. - */ - get byobRequest() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException("byobRequest"); + }; + Glob.prototype.resume = function() { + if (this.paused) { + this.emit("resume"); + this.paused = false; + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0); + this._emitQueue.length = 0; + for (var i2 = 0; i2 < eq.length; i2++) { + var e2 = eq[i2]; + this._emitMatch(e2[0], e2[1]); } - return ReadableByteStreamControllerGetBYOBRequest(this); } - /** - * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is - * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure. - */ - get desiredSize() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException("desiredSize"); + if (this._processQueue.length) { + var pq = this._processQueue.slice(0); + this._processQueue.length = 0; + for (var i2 = 0; i2 < pq.length; i2++) { + var p = pq[i2]; + this._processing--; + this._process(p[0], p[1], p[2], p[3]); } - return ReadableByteStreamControllerGetDesiredSize(this); } - /** - * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from - * the stream, but once those are read, the stream will become closed. - */ - close() { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException("close"); - } - if (this._closeRequested) { - throw new TypeError("The stream has already been closed; do not close it again!"); - } - const state = this._controlledReadableByteStream._state; - if (state !== "readable") { - throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`); + } + }; + Glob.prototype._process = function(pattern, index, inGlobStar, cb) { + assert(this instanceof Glob); + assert(typeof cb === "function"); + if (this.aborted) + return; + this._processing++; + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]); + return; + } + var n = 0; + while (typeof pattern[n] === "string") { + n++; + } + var prefix; + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join("/"), index, cb); + return; + case 0: + prefix = null; + break; + default: + prefix = pattern.slice(0, n).join("/"); + break; + } + var remain = pattern.slice(n); + var read; + if (prefix === null) + read = "."; + else if (isAbsolute(prefix) || isAbsolute(pattern.map(function(p) { + return typeof p === "string" ? 
p : "[*]"; + }).join("/"))) { + if (!prefix || !isAbsolute(prefix)) + prefix = "/" + prefix; + read = prefix; + } else + read = prefix; + var abs = this._makeAbs(read); + if (childrenIgnored(this, read)) + return cb(); + var isGlobStar = remain[0] === minimatch2.GLOBSTAR; + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb); + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb); + }; + Glob.prototype._processReaddir = function(prefix, read, abs, remain, index, inGlobStar, cb) { + var self2 = this; + this._readdir(abs, inGlobStar, function(er, entries) { + return self2._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb); + }); + }; + Glob.prototype._processReaddir2 = function(prefix, read, abs, remain, index, inGlobStar, entries, cb) { + if (!entries) + return cb(); + var pn = remain[0]; + var negate = !!this.minimatch.negate; + var rawGlob = pn._glob; + var dotOk = this.dot || rawGlob.charAt(0) === "."; + var matchedEntries = []; + for (var i2 = 0; i2 < entries.length; i2++) { + var e2 = entries[i2]; + if (e2.charAt(0) !== "." || dotOk) { + var m2; + if (negate && !prefix) { + m2 = !e2.match(pn); + } else { + m2 = e2.match(pn); } - ReadableByteStreamControllerClose(this); + if (m2) + matchedEntries.push(e2); } - enqueue(chunk) { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException("enqueue"); - } - assertRequiredArgument(chunk, 1, "enqueue"); - if (!ArrayBuffer.isView(chunk)) { - throw new TypeError("chunk must be an array buffer view"); - } - if (chunk.byteLength === 0) { - throw new TypeError("chunk must have non-zero byteLength"); - } - if (chunk.buffer.byteLength === 0) { - throw new TypeError(`chunk's buffer must have non-zero byteLength`); - } - if (this._closeRequested) { - throw new TypeError("stream is closed or draining"); + } + var len = matchedEntries.length; + if (len === 0) + return cb(); + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = /* @__PURE__ */ Object.create(null); + for (var i2 = 0; i2 < len; i2++) { + var e2 = matchedEntries[i2]; + if (prefix) { + if (prefix !== "/") + e2 = prefix + "/" + e2; + else + e2 = prefix + e2; } - const state = this._controlledReadableByteStream._state; - if (state !== "readable") { - throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`); + if (e2.charAt(0) === "/" && !this.nomount) { + e2 = path2.join(this.root, e2); } - ReadableByteStreamControllerEnqueue(this, chunk); + this._emitMatch(index, e2); } - /** - * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. 
- */ - error(e2 = void 0) { - if (!IsReadableByteStreamController(this)) { - throw byteStreamControllerBrandCheckException("error"); - } - ReadableByteStreamControllerError(this, e2); - } - /** @internal */ - [CancelSteps](reason) { - ReadableByteStreamControllerClearPendingPullIntos(this); - ResetQueue(this); - const result = this._cancelAlgorithm(reason); - ReadableByteStreamControllerClearAlgorithms(this); - return result; - } - /** @internal */ - [PullSteps](readRequest) { - const stream = this._controlledReadableByteStream; - if (this._queueTotalSize > 0) { - ReadableByteStreamControllerFillReadRequestFromQueue(this, readRequest); - return; - } - const autoAllocateChunkSize = this._autoAllocateChunkSize; - if (autoAllocateChunkSize !== void 0) { - let buffer; - try { - buffer = new ArrayBuffer(autoAllocateChunkSize); - } catch (bufferE) { - readRequest._errorSteps(bufferE); - return; - } - const pullIntoDescriptor = { - buffer, - bufferByteLength: autoAllocateChunkSize, - byteOffset: 0, - byteLength: autoAllocateChunkSize, - bytesFilled: 0, - minimumFill: 1, - elementSize: 1, - viewConstructor: Uint8Array, - readerType: "default" - }; - this._pendingPullIntos.push(pullIntoDescriptor); - } - ReadableStreamAddReadRequest(stream, readRequest); - ReadableByteStreamControllerCallPullIfNeeded(this); - } - /** @internal */ - [ReleaseSteps]() { - if (this._pendingPullIntos.length > 0) { - const firstPullInto = this._pendingPullIntos.peek(); - firstPullInto.readerType = "none"; - this._pendingPullIntos = new SimpleQueue(); - this._pendingPullIntos.push(firstPullInto); - } + return cb(); + } + remain.shift(); + for (var i2 = 0; i2 < len; i2++) { + var e2 = matchedEntries[i2]; + var newPattern; + if (prefix) { + if (prefix !== "/") + e2 = prefix + "/" + e2; + else + e2 = prefix + e2; } + this._process([e2].concat(remain), index, inGlobStar, cb); } - Object.defineProperties(ReadableByteStreamController.prototype, { - close: { enumerable: true }, - enqueue: { enumerable: true }, - error: { enumerable: true }, - byobRequest: { enumerable: true }, - desiredSize: { enumerable: true } - }); - setFunctionName(ReadableByteStreamController.prototype.close, "close"); - setFunctionName(ReadableByteStreamController.prototype.enqueue, "enqueue"); - setFunctionName(ReadableByteStreamController.prototype.error, "error"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableByteStreamController.prototype, Symbol.toStringTag, { - value: "ReadableByteStreamController", - configurable: true - }); + cb(); + }; + Glob.prototype._emitMatch = function(index, e2) { + if (this.aborted) + return; + if (isIgnored(this, e2)) + return; + if (this.paused) { + this._emitQueue.push([index, e2]); + return; } - function IsReadableByteStreamController(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_controlledReadableByteStream")) { - return false; - } - return x2 instanceof ReadableByteStreamController; + var abs = isAbsolute(e2) ? 
e2 : this._makeAbs(e2); + if (this.mark) + e2 = this._mark(e2); + if (this.absolute) + e2 = abs; + if (this.matches[index][e2]) + return; + if (this.nodir) { + var c = this.cache[abs]; + if (c === "DIR" || Array.isArray(c)) + return; } - function IsReadableStreamBYOBRequest(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_associatedReadableByteStreamController")) { - return false; - } - return x2 instanceof ReadableStreamBYOBRequest; + this.matches[index][e2] = true; + var st = this.statCache[abs]; + if (st) + this.emit("stat", e2, st); + this.emit("match", e2); + }; + Glob.prototype._readdirInGlobStar = function(abs, cb) { + if (this.aborted) + return; + if (this.follow) + return this._readdir(abs, false, cb); + var lstatkey = "lstat\0" + abs; + var self2 = this; + var lstatcb = inflight(lstatkey, lstatcb_); + if (lstatcb) + self2.fs.lstat(abs, lstatcb); + function lstatcb_(er, lstat) { + if (er && er.code === "ENOENT") + return cb(); + var isSym = lstat && lstat.isSymbolicLink(); + self2.symlinks[abs] = isSym; + if (!isSym && lstat && !lstat.isDirectory()) { + self2.cache[abs] = "FILE"; + cb(); + } else + self2._readdir(abs, false, cb); } - function ReadableByteStreamControllerCallPullIfNeeded(controller) { - const shouldPull = ReadableByteStreamControllerShouldCallPull(controller); - if (!shouldPull) { - return; - } - if (controller._pulling) { - controller._pullAgain = true; - return; + }; + Glob.prototype._readdir = function(abs, inGlobStar, cb) { + if (this.aborted) + return; + cb = inflight("readdir\0" + abs + "\0" + inGlobStar, cb); + if (!cb) + return; + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb); + if (ownProp(this.cache, abs)) { + var c = this.cache[abs]; + if (!c || c === "FILE") + return cb(); + if (Array.isArray(c)) + return cb(null, c); + } + var self2 = this; + self2.fs.readdir(abs, readdirCb(this, abs, cb)); + }; + function readdirCb(self2, abs, cb) { + return function(er, entries) { + if (er) + self2._readdirError(abs, er, cb); + else + self2._readdirEntries(abs, entries, cb); + }; + } + Glob.prototype._readdirEntries = function(abs, entries, cb) { + if (this.aborted) + return; + if (!this.mark && !this.stat) { + for (var i2 = 0; i2 < entries.length; i2++) { + var e2 = entries[i2]; + if (abs === "/") + e2 = abs + e2; + else + e2 = abs + "/" + e2; + this.cache[e2] = true; } - controller._pulling = true; - const pullPromise = controller._pullAlgorithm(); - uponPromise(pullPromise, () => { - controller._pulling = false; - if (controller._pullAgain) { - controller._pullAgain = false; - ReadableByteStreamControllerCallPullIfNeeded(controller); + } + this.cache[abs] = entries; + return cb(null, entries); + }; + Glob.prototype._readdirError = function(f3, er, cb) { + if (this.aborted) + return; + switch (er.code) { + case "ENOTSUP": + // https://github.com/isaacs/node-glob/issues/205 + case "ENOTDIR": + var abs = this._makeAbs(f3); + this.cache[abs] = "FILE"; + if (abs === this.cwdAbs) { + var error = new Error(er.code + " invalid cwd " + this.cwd); + error.path = this.cwd; + error.code = er.code; + this.emit("error", error); + this.abort(); } - return null; - }, (e2) => { - ReadableByteStreamControllerError(controller, e2); - return null; - }); + break; + case "ENOENT": + // not terribly unusual + case "ELOOP": + case "ENAMETOOLONG": + case "UNKNOWN": + this.cache[this._makeAbs(f3)] = false; + break; + default: + this.cache[this._makeAbs(f3)] = false; + if (this.strict) { + 
this.emit("error", er); + this.abort(); + } + if (!this.silent) + console.error("glob error", er); + break; } - function ReadableByteStreamControllerClearPendingPullIntos(controller) { - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - controller._pendingPullIntos = new SimpleQueue(); + return cb(); + }; + Glob.prototype._processGlobStar = function(prefix, read, abs, remain, index, inGlobStar, cb) { + var self2 = this; + this._readdir(abs, inGlobStar, function(er, entries) { + self2._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb); + }); + }; + Glob.prototype._processGlobStar2 = function(prefix, read, abs, remain, index, inGlobStar, entries, cb) { + if (!entries) + return cb(); + var remainWithoutGlobStar = remain.slice(1); + var gspref = prefix ? [prefix] : []; + var noGlobStar = gspref.concat(remainWithoutGlobStar); + this._process(noGlobStar, index, false, cb); + var isSym = this.symlinks[abs]; + var len = entries.length; + if (isSym && inGlobStar) + return cb(); + for (var i2 = 0; i2 < len; i2++) { + var e2 = entries[i2]; + if (e2.charAt(0) === "." && !this.dot) + continue; + var instead = gspref.concat(entries[i2], remainWithoutGlobStar); + this._process(instead, index, true, cb); + var below = gspref.concat(entries[i2], remain); + this._process(below, index, true, cb); } - function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) { - let done = false; - if (stream._state === "closed") { - done = true; - } - const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); - if (pullIntoDescriptor.readerType === "default") { - ReadableStreamFulfillReadRequest(stream, filledView, done); + cb(); + }; + Glob.prototype._processSimple = function(prefix, index, cb) { + var self2 = this; + this._stat(prefix, function(er, exists2) { + self2._processSimple2(prefix, index, er, exists2, cb); + }); + }; + Glob.prototype._processSimple2 = function(prefix, index, er, exists2, cb) { + if (!this.matches[index]) + this.matches[index] = /* @__PURE__ */ Object.create(null); + if (!exists2) + return cb(); + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix); + if (prefix.charAt(0) === "/") { + prefix = path2.join(this.root, prefix); } else { - ReadableStreamFulfillReadIntoRequest(stream, filledView, done); + prefix = path2.resolve(this.root, prefix); + if (trail) + prefix += "/"; } } - function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) { - const bytesFilled = pullIntoDescriptor.bytesFilled; - const elementSize = pullIntoDescriptor.elementSize; - return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize); - } - function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) { - controller._queue.push({ buffer, byteOffset, byteLength }); - controller._queueTotalSize += byteLength; + if (process.platform === "win32") + prefix = prefix.replace(/\\/g, "/"); + this._emitMatch(index, prefix); + cb(); + }; + Glob.prototype._stat = function(f3, cb) { + var abs = this._makeAbs(f3); + var needDir = f3.slice(-1) === "/"; + if (f3.length > this.maxLength) + return cb(); + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs]; + if (Array.isArray(c)) + c = "DIR"; + if (!needDir || c === "DIR") + return cb(null, c); + if (needDir && c === "FILE") + return cb(); } - function 
ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, buffer, byteOffset, byteLength) { - let clonedChunk; - try { - clonedChunk = ArrayBufferSlice(buffer, byteOffset, byteOffset + byteLength); - } catch (cloneE) { - ReadableByteStreamControllerError(controller, cloneE); - throw cloneE; + var exists2; + var stat2 = this.statCache[abs]; + if (stat2 !== void 0) { + if (stat2 === false) + return cb(null, stat2); + else { + var type = stat2.isDirectory() ? "DIR" : "FILE"; + if (needDir && type === "FILE") + return cb(); + else + return cb(null, type, stat2); } - ReadableByteStreamControllerEnqueueChunkToQueue(controller, clonedChunk, 0, byteLength); } - function ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, firstDescriptor) { - if (firstDescriptor.bytesFilled > 0) { - ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, firstDescriptor.buffer, firstDescriptor.byteOffset, firstDescriptor.bytesFilled); + var self2 = this; + var statcb = inflight("stat\0" + abs, lstatcb_); + if (statcb) + self2.fs.lstat(abs, statcb); + function lstatcb_(er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + return self2.fs.stat(abs, function(er2, stat3) { + if (er2) + self2._stat2(f3, abs, null, lstat, cb); + else + self2._stat2(f3, abs, er2, stat3, cb); + }); + } else { + self2._stat2(f3, abs, er, lstat, cb); } - ReadableByteStreamControllerShiftPendingPullInto(controller); } - function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) { - const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled); - const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy; - let totalBytesToCopyRemaining = maxBytesToCopy; - let ready = false; - const remainderBytes = maxBytesFilled % pullIntoDescriptor.elementSize; - const maxAlignedBytes = maxBytesFilled - remainderBytes; - if (maxAlignedBytes >= pullIntoDescriptor.minimumFill) { - totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled; - ready = true; - } - const queue = controller._queue; - while (totalBytesToCopyRemaining > 0) { - const headOfQueue = queue.peek(); - const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength); - const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; - CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy); - if (headOfQueue.byteLength === bytesToCopy) { - queue.shift(); - } else { - headOfQueue.byteOffset += bytesToCopy; - headOfQueue.byteLength -= bytesToCopy; - } - controller._queueTotalSize -= bytesToCopy; - ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor); - totalBytesToCopyRemaining -= bytesToCopy; - } - return ready; + }; + Glob.prototype._stat2 = function(f3, abs, er, stat2, cb) { + if (er && (er.code === "ENOENT" || er.code === "ENOTDIR")) { + this.statCache[abs] = false; + return cb(); } - function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) { - pullIntoDescriptor.bytesFilled += size; + var needDir = f3.slice(-1) === "/"; + this.statCache[abs] = stat2; + if (abs.slice(-1) === "/" && stat2 && !stat2.isDirectory()) + return cb(null, false, stat2); + var c = true; + if (stat2) + c = stat2.isDirectory() ? 
"DIR" : "FILE"; + this.cache[abs] = this.cache[abs] || c; + if (needDir && c === "FILE") + return cb(); + return cb(null, c, stat2); + }; + } +}); + +// node_modules/true-case-path/index.js +var require_true_case_path = __commonJS({ + "node_modules/true-case-path/index.js"(exports2, module2) { + "use strict"; + var glob = require_glob(); + var path2 = require("path"); + function trueCasePathSync(fsPath) { + var fsPathNormalized = path2.normalize(fsPath); + if (process.platform === "darwin") fsPathNormalized = fsPathNormalized.normalize("NFD"); + var pathRoot = path2.parse(fsPathNormalized).root; + var noDrivePath = fsPathNormalized.slice(Math.max(pathRoot.length - 1, 0)); + return glob.sync(noDrivePath, { nocase: true, cwd: pathRoot })[0]; + } + module2.exports = trueCasePathSync; + } +}); + +// node_modules/codeowners/codeowners.js +var require_codeowners = __commonJS({ + "node_modules/codeowners/codeowners.js"(exports2, module2) { + "use strict"; + var findUp = require_find_up(); + var fs2 = require("fs"); + var ignore = require_ignore(); + var isDirectory = require_is_directory(); + var path2 = require("path"); + var trueCasePath = require_true_case_path(); + function ownerMatcher(pathString) { + const matcher = ignore().add(pathString); + return matcher.ignores.bind(matcher); + } + function Codeowners2(currentPath, fileName = "CODEOWNERS") { + const pathOrCwd = currentPath || process.cwd(); + const codeownersPath = findUp.sync( + [`.github/${fileName}`, `.gitlab/${fileName}`, `docs/${fileName}`, `${fileName}`], + { cwd: pathOrCwd } + ); + if (!codeownersPath) { + throw new Error(`Could not find a CODEOWNERS file`); } - function ReadableByteStreamControllerHandleQueueDrain(controller) { - if (controller._queueTotalSize === 0 && controller._closeRequested) { - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamClose(controller._controlledReadableByteStream); - } else { - ReadableByteStreamControllerCallPullIfNeeded(controller); - } + this.codeownersFilePath = trueCasePath(codeownersPath); + this.codeownersDirectory = path2.dirname(this.codeownersFilePath); + if (this.codeownersDirectory.match(/\/(.github|.gitlab|docs)$/i)) { + this.codeownersDirectory = path2.dirname(this.codeownersDirectory); } - function ReadableByteStreamControllerInvalidateBYOBRequest(controller) { - if (controller._byobRequest === null) { - return; - } - controller._byobRequest._associatedReadableByteStreamController = void 0; - controller._byobRequest._view = null; - controller._byobRequest = null; + const codeownersFile = path2.basename(this.codeownersFilePath); + if (codeownersFile !== fileName) { + throw new Error(`Found a ${fileName} file but it was lower-cased: ${this.codeownersFilePath}`); } - function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) { - while (controller._pendingPullIntos.length > 0) { - if (controller._queueTotalSize === 0) { - return; - } - const pullIntoDescriptor = controller._pendingPullIntos.peek(); - if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { - ReadableByteStreamControllerShiftPendingPullInto(controller); - ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); - } - } + if (isDirectory.sync(this.codeownersFilePath)) { + throw new Error(`Found a ${fileName} but it's a directory: ${this.codeownersFilePath}`); } - function ReadableByteStreamControllerProcessReadRequestsUsingQueue(controller) { - const reader = 
controller._controlledReadableByteStream._reader; - while (reader._readRequests.length > 0) { - if (controller._queueTotalSize === 0) { - return; - } - const readRequest = reader._readRequests.shift(); - ReadableByteStreamControllerFillReadRequestFromQueue(controller, readRequest); + const lines = fs2.readFileSync(this.codeownersFilePath).toString().split(/\r\n|\r|\n/); + const ownerEntries = []; + for (const line of lines) { + if (!line) { + continue; + } + if (line.startsWith("#")) { + continue; } + const [pathString, ...usernames] = line.split(/\s+/); + ownerEntries.push({ + path: pathString, + usernames, + match: ownerMatcher(pathString) + }); } - function ReadableByteStreamControllerPullInto(controller, view, min, readIntoRequest) { - const stream = controller._controlledReadableByteStream; - const ctor = view.constructor; - const elementSize = arrayBufferViewElementSize(ctor); - const { byteOffset, byteLength } = view; - const minimumFill = min * elementSize; - let buffer; - try { - buffer = TransferArrayBuffer(view.buffer); - } catch (e2) { - readIntoRequest._errorSteps(e2); - return; + this.ownerEntries = ownerEntries.reverse(); + } + var EMPTY_ARRAY = []; + Codeowners2.prototype.getOwner = function getOwner(filePath) { + for (const entry of this.ownerEntries) { + if (entry.match(filePath)) { + return entry.usernames; } - const pullIntoDescriptor = { - buffer, - bufferByteLength: buffer.byteLength, - byteOffset, - byteLength, - bytesFilled: 0, - minimumFill, - elementSize, - viewConstructor: ctor, - readerType: "byob" - }; - if (controller._pendingPullIntos.length > 0) { - controller._pendingPullIntos.push(pullIntoDescriptor); - ReadableStreamAddReadIntoRequest(stream, readIntoRequest); - return; - } - if (stream._state === "closed") { - const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0); - readIntoRequest._closeSteps(emptyView); - return; + } + return EMPTY_ARRAY; + }; + module2.exports = Codeowners2; + } +}); + +// node_modules/brace-expansion/index.js +var require_brace_expansion2 = __commonJS({ + "node_modules/brace-expansion/index.js"(exports2, module2) { + "use strict"; + var balanced = require_balanced_match(); + module2.exports = expandTop; + var escSlash = "\0SLASH" + Math.random() + "\0"; + var escOpen = "\0OPEN" + Math.random() + "\0"; + var escClose = "\0CLOSE" + Math.random() + "\0"; + var escComma = "\0COMMA" + Math.random() + "\0"; + var escPeriod = "\0PERIOD" + Math.random() + "\0"; + function numeric(str) { + return parseInt(str, 10) == str ? 
parseInt(str, 10) : str.charCodeAt(0); + } + function escapeBraces(str) { + return str.split("\\\\").join(escSlash).split("\\{").join(escOpen).split("\\}").join(escClose).split("\\,").join(escComma).split("\\.").join(escPeriod); + } + function unescapeBraces(str) { + return str.split(escSlash).join("\\").split(escOpen).join("{").split(escClose).join("}").split(escComma).join(",").split(escPeriod).join("."); + } + function parseCommaParts(str) { + if (!str) + return [""]; + var parts = []; + var m2 = balanced("{", "}", str); + if (!m2) + return str.split(","); + var pre = m2.pre; + var body = m2.body; + var post = m2.post; + var p = pre.split(","); + p[p.length - 1] += "{" + body + "}"; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; + } + function expandTop(str) { + if (!str) + return []; + if (str.substr(0, 2) === "{}") { + str = "\\{\\}" + str.substr(2); + } + return expand4(escapeBraces(str), true).map(unescapeBraces); + } + function embrace(str) { + return "{" + str + "}"; + } + function isPadded(el) { + return /^-?0\d/.test(el); + } + function lte(i2, y) { + return i2 <= y; + } + function gte(i2, y) { + return i2 >= y; + } + function expand4(str, isTop) { + var expansions = []; + var m2 = balanced("{", "}", str); + if (!m2) return [str]; + var pre = m2.pre; + var post = m2.post.length ? expand4(m2.post, false) : [""]; + if (/\$$/.test(m2.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + "{" + m2.body + "}" + post[k]; + expansions.push(expansion); } - if (controller._queueTotalSize > 0) { - if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { - const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); - ReadableByteStreamControllerHandleQueueDrain(controller); - readIntoRequest._chunkSteps(filledView); - return; + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m2.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m2.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m2.body.indexOf(",") >= 0; + if (!isSequence && !isOptions) { + if (m2.post.match(/,.*\}/)) { + str = m2.pre + "{" + m2.body + escClose + m2.post; + return expand4(str); } - if (controller._closeRequested) { - const e2 = new TypeError("Insufficient bytes to fill elements in the given buffer"); - ReadableByteStreamControllerError(controller, e2); - readIntoRequest._errorSteps(e2); - return; + return [str]; + } + var n; + if (isSequence) { + n = m2.body.split(/\.\./); + } else { + n = parseCommaParts(m2.body); + if (n.length === 1) { + n = expand4(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m2.pre + n[0] + p; + }); + } } } - controller._pendingPullIntos.push(pullIntoDescriptor); - ReadableStreamAddReadIntoRequest(stream, readIntoRequest); - ReadableByteStreamControllerCallPullIfNeeded(controller); - } - function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) { - if (firstDescriptor.readerType === "none") { - ReadableByteStreamControllerShiftPendingPullInto(controller); + var N; + if (isSequence) { + var x2 = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length); + var incr = n.length == 3 ? 
Math.abs(numeric(n[2])) : 1; + var test = lte; + var reverse = y < x2; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + N = []; + for (var i2 = x2; test(i2, y); i2 += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i2); + if (c === "\\") + c = ""; + } else { + c = String(i2); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join("0"); + if (i2 < 0) + c = "-" + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand4(n[j], false)); + } } - const stream = controller._controlledReadableByteStream; - if (ReadableStreamHasBYOBReader(stream)) { - while (ReadableStreamGetNumReadIntoRequests(stream) > 0) { - const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller); - ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor); + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); } } } - function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) { - ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor); - if (pullIntoDescriptor.readerType === "none") { - ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, pullIntoDescriptor); - ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); - return; - } - if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.minimumFill) { - return; - } - ReadableByteStreamControllerShiftPendingPullInto(controller); - const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize; - if (remainderSize > 0) { - const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; - ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, pullIntoDescriptor.buffer, end - remainderSize, remainderSize); - } - pullIntoDescriptor.bytesFilled -= remainderSize; - ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); - ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); + return expansions; + } + } +}); + +// node_modules/web-streams-polyfill/dist/ponyfill.es2018.js +var require_ponyfill_es2018 = __commonJS({ + "node_modules/web-streams-polyfill/dist/ponyfill.es2018.js"(exports2, module2) { + "use strict"; + (function(global2, factory) { + typeof exports2 === "object" && typeof module2 !== "undefined" ? factory(exports2) : typeof define === "function" && define.amd ? define(["exports"], factory) : (global2 = typeof globalThis !== "undefined" ? 
globalThis : global2 || self, factory(global2.WebStreamsPolyfill = {})); + })(exports2, function(exports3) { + "use strict"; + function noop4() { + return void 0; } - function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) { - const firstDescriptor = controller._pendingPullIntos.peek(); - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - const state = controller._controlledReadableByteStream._state; - if (state === "closed") { - ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor); - } else { - ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor); + function typeIsObject(x2) { + return typeof x2 === "object" && x2 !== null || typeof x2 === "function"; + } + const rethrowAssertionErrorRejection = noop4; + function setFunctionName(fn, name) { + try { + Object.defineProperty(fn, "name", { + value: name, + configurable: true + }); + } catch (_a3) { } - ReadableByteStreamControllerCallPullIfNeeded(controller); } - function ReadableByteStreamControllerShiftPendingPullInto(controller) { - const descriptor = controller._pendingPullIntos.shift(); - return descriptor; + const originalPromise = Promise; + const originalPromiseThen = Promise.prototype.then; + const originalPromiseReject = Promise.reject.bind(originalPromise); + function newPromise(executor) { + return new originalPromise(executor); } - function ReadableByteStreamControllerShouldCallPull(controller) { - const stream = controller._controlledReadableByteStream; - if (stream._state !== "readable") { - return false; - } - if (controller._closeRequested) { - return false; - } - if (!controller._started) { - return false; - } - if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - return true; - } - if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) { - return true; - } - const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller); - if (desiredSize > 0) { - return true; - } - return false; + function promiseResolvedWith(value) { + return newPromise((resolve) => resolve(value)); } - function ReadableByteStreamControllerClearAlgorithms(controller) { - controller._pullAlgorithm = void 0; - controller._cancelAlgorithm = void 0; + function promiseRejectedWith(reason) { + return originalPromiseReject(reason); } - function ReadableByteStreamControllerClose(controller) { - const stream = controller._controlledReadableByteStream; - if (controller._closeRequested || stream._state !== "readable") { - return; + function PerformPromiseThen(promise, onFulfilled, onRejected) { + return originalPromiseThen.call(promise, onFulfilled, onRejected); + } + function uponPromise(promise, onFulfilled, onRejected) { + PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), void 0, rethrowAssertionErrorRejection); + } + function uponFulfillment(promise, onFulfilled) { + uponPromise(promise, onFulfilled); + } + function uponRejection(promise, onRejected) { + uponPromise(promise, void 0, onRejected); + } + function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) { + return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler); + } + function setPromiseIsHandledToTrue(promise) { + PerformPromiseThen(promise, void 0, rethrowAssertionErrorRejection); + } + let _queueMicrotask = (callback) => { + if (typeof queueMicrotask === "function") { + _queueMicrotask = queueMicrotask; + } else { + const resolvedPromise = 
promiseResolvedWith(void 0); + _queueMicrotask = (cb) => PerformPromiseThen(resolvedPromise, cb); } - if (controller._queueTotalSize > 0) { - controller._closeRequested = true; - return; + return _queueMicrotask(callback); + }; + function reflectCall(F2, V, args) { + if (typeof F2 !== "function") { + throw new TypeError("Argument is not a function"); } - if (controller._pendingPullIntos.length > 0) { - const firstPendingPullInto = controller._pendingPullIntos.peek(); - if (firstPendingPullInto.bytesFilled % firstPendingPullInto.elementSize !== 0) { - const e2 = new TypeError("Insufficient bytes to fill elements in the given buffer"); - ReadableByteStreamControllerError(controller, e2); - throw e2; - } + return Function.prototype.apply.call(F2, V, args); + } + function promiseCall(F2, V, args) { + try { + return promiseResolvedWith(reflectCall(F2, V, args)); + } catch (value) { + return promiseRejectedWith(value); } - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamClose(stream); } - function ReadableByteStreamControllerEnqueue(controller, chunk) { - const stream = controller._controlledReadableByteStream; - if (controller._closeRequested || stream._state !== "readable") { - return; + const QUEUE_MAX_ARRAY_SIZE = 16384; + class SimpleQueue { + constructor() { + this._cursor = 0; + this._size = 0; + this._front = { + _elements: [], + _next: void 0 + }; + this._back = this._front; + this._cursor = 0; + this._size = 0; } - const { buffer, byteOffset, byteLength } = chunk; - if (IsDetachedBuffer(buffer)) { - throw new TypeError("chunk's buffer is detached and so cannot be enqueued"); + get length() { + return this._size; } - const transferredBuffer = TransferArrayBuffer(buffer); - if (controller._pendingPullIntos.length > 0) { - const firstPendingPullInto = controller._pendingPullIntos.peek(); - if (IsDetachedBuffer(firstPendingPullInto.buffer)) { - throw new TypeError("The BYOB request's buffer has been detached and so cannot be filled with an enqueued chunk"); + // For exception safety, this method is structured in order: + // 1. Read state + // 2. Calculate required state mutations + // 3. Perform state mutations + push(element) { + const oldBack = this._back; + let newBack = oldBack; + if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) { + newBack = { + _elements: [], + _next: void 0 + }; } - ReadableByteStreamControllerInvalidateBYOBRequest(controller); - firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer); - if (firstPendingPullInto.readerType === "none") { - ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, firstPendingPullInto); + oldBack._elements.push(element); + if (newBack !== oldBack) { + this._back = newBack; + oldBack._next = newBack; } + ++this._size; } - if (ReadableStreamHasDefaultReader(stream)) { - ReadableByteStreamControllerProcessReadRequestsUsingQueue(controller); - if (ReadableStreamGetNumReadRequests(stream) === 0) { - ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); - } else { - if (controller._pendingPullIntos.length > 0) { - ReadableByteStreamControllerShiftPendingPullInto(controller); + // Like push(), shift() follows the read -> calculate -> mutate pattern for + // exception safety. 
+ shift() { + const oldFront = this._front; + let newFront = oldFront; + const oldCursor = this._cursor; + let newCursor = oldCursor + 1; + const elements = oldFront._elements; + const element = elements[oldCursor]; + if (newCursor === QUEUE_MAX_ARRAY_SIZE) { + newFront = oldFront._next; + newCursor = 0; + } + --this._size; + this._cursor = newCursor; + if (oldFront !== newFront) { + this._front = newFront; + } + elements[oldCursor] = void 0; + return element; + } + // The tricky thing about forEach() is that it can be called + // re-entrantly. The queue may be mutated inside the callback. It is easy to + // see that push() within the callback has no negative effects since the end + // of the queue is checked for on every iteration. If shift() is called + // repeatedly within the callback then the next iteration may return an + // element that has been removed. In this case the callback will be called + // with undefined values until we either "catch up" with elements that still + // exist or reach the back of the queue. + forEach(callback) { + let i2 = this._cursor; + let node = this._front; + let elements = node._elements; + while (i2 !== elements.length || node._next !== void 0) { + if (i2 === elements.length) { + node = node._next; + elements = node._elements; + i2 = 0; + if (elements.length === 0) { + break; + } } - const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength); - ReadableStreamFulfillReadRequest(stream, transferredView, false); + callback(elements[i2]); + ++i2; } - } else if (ReadableStreamHasBYOBReader(stream)) { - ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); - ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); - } else { - ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); } - ReadableByteStreamControllerCallPullIfNeeded(controller); + // Return the element that would be returned if shift() was called now, + // without modifying the queue. 
+ peek() { + const front = this._front; + const cursor = this._cursor; + return front._elements[cursor]; + } } - function ReadableByteStreamControllerError(controller, e2) { - const stream = controller._controlledReadableByteStream; - if (stream._state !== "readable") { - return; + const AbortSteps = Symbol("[[AbortSteps]]"); + const ErrorSteps = Symbol("[[ErrorSteps]]"); + const CancelSteps = Symbol("[[CancelSteps]]"); + const PullSteps = Symbol("[[PullSteps]]"); + const ReleaseSteps = Symbol("[[ReleaseSteps]]"); + function ReadableStreamReaderGenericInitialize(reader, stream) { + reader._ownerReadableStream = stream; + stream._reader = reader; + if (stream._state === "readable") { + defaultReaderClosedPromiseInitialize(reader); + } else if (stream._state === "closed") { + defaultReaderClosedPromiseInitializeAsResolved(reader); + } else { + defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError); } - ReadableByteStreamControllerClearPendingPullIntos(controller); - ResetQueue(controller); - ReadableByteStreamControllerClearAlgorithms(controller); - ReadableStreamError(stream, e2); } - function ReadableByteStreamControllerFillReadRequestFromQueue(controller, readRequest) { - const entry = controller._queue.shift(); - controller._queueTotalSize -= entry.byteLength; - ReadableByteStreamControllerHandleQueueDrain(controller); - const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength); - readRequest._chunkSteps(view); + function ReadableStreamReaderGenericCancel(reader, reason) { + const stream = reader._ownerReadableStream; + return ReadableStreamCancel(stream, reason); } - function ReadableByteStreamControllerGetBYOBRequest(controller) { - if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled); - const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype); - SetUpReadableStreamBYOBRequest(byobRequest, controller, view); - controller._byobRequest = byobRequest; + function ReadableStreamReaderGenericRelease(reader) { + const stream = reader._ownerReadableStream; + if (stream._state === "readable") { + defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); + } else { + defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`)); } - return controller._byobRequest; + stream._readableStreamController[ReleaseSteps](); + stream._reader = void 0; + reader._ownerReadableStream = void 0; } - function ReadableByteStreamControllerGetDesiredSize(controller) { - const state = controller._controlledReadableByteStream._state; - if (state === "errored") { - return null; + function readerLockException(name) { + return new TypeError("Cannot " + name + " a stream using a released reader"); + } + function defaultReaderClosedPromiseInitialize(reader) { + reader._closedPromise = newPromise((resolve, reject) => { + reader._closedPromise_resolve = resolve; + reader._closedPromise_reject = reject; + }); + } + function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) { + defaultReaderClosedPromiseInitialize(reader); + defaultReaderClosedPromiseReject(reader, reason); + } + function 
defaultReaderClosedPromiseInitializeAsResolved(reader) { + defaultReaderClosedPromiseInitialize(reader); + defaultReaderClosedPromiseResolve(reader); + } + function defaultReaderClosedPromiseReject(reader, reason) { + if (reader._closedPromise_reject === void 0) { + return; } - if (state === "closed") { - return 0; + setPromiseIsHandledToTrue(reader._closedPromise); + reader._closedPromise_reject(reason); + reader._closedPromise_resolve = void 0; + reader._closedPromise_reject = void 0; + } + function defaultReaderClosedPromiseResetToRejected(reader, reason) { + defaultReaderClosedPromiseInitializeAsRejected(reader, reason); + } + function defaultReaderClosedPromiseResolve(reader) { + if (reader._closedPromise_resolve === void 0) { + return; } - return controller._strategyHWM - controller._queueTotalSize; + reader._closedPromise_resolve(void 0); + reader._closedPromise_resolve = void 0; + reader._closedPromise_reject = void 0; } - function ReadableByteStreamControllerRespond(controller, bytesWritten) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const state = controller._controlledReadableByteStream._state; - if (state === "closed") { - if (bytesWritten !== 0) { - throw new TypeError("bytesWritten must be 0 when calling respond() on a closed stream"); - } - } else { - if (bytesWritten === 0) { - throw new TypeError("bytesWritten must be greater than 0 when calling respond() on a readable stream"); - } - if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) { - throw new RangeError("bytesWritten out of range"); - } - } - firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer); - ReadableByteStreamControllerRespondInternal(controller, bytesWritten); + const NumberIsFinite = Number.isFinite || function(x2) { + return typeof x2 === "number" && isFinite(x2); + }; + const MathTrunc = Math.trunc || function(v) { + return v < 0 ? 
Math.ceil(v) : Math.floor(v); + }; + function isDictionary(x2) { + return typeof x2 === "object" || typeof x2 === "function"; } - function ReadableByteStreamControllerRespondWithNewView(controller, view) { - const firstDescriptor = controller._pendingPullIntos.peek(); - const state = controller._controlledReadableByteStream._state; - if (state === "closed") { - if (view.byteLength !== 0) { - throw new TypeError("The view's length must be 0 when calling respondWithNewView() on a closed stream"); - } - } else { - if (view.byteLength === 0) { - throw new TypeError("The view's length must be greater than 0 when calling respondWithNewView() on a readable stream"); - } - } - if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) { - throw new RangeError("The region specified by view does not match byobRequest"); - } - if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) { - throw new RangeError("The buffer of view has different capacity than byobRequest"); + function assertDictionary(obj, context2) { + if (obj !== void 0 && !isDictionary(obj)) { + throw new TypeError(`${context2} is not an object.`); } - if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) { - throw new RangeError("The region specified by view is larger than byobRequest"); + } + function assertFunction(x2, context2) { + if (typeof x2 !== "function") { + throw new TypeError(`${context2} is not a function.`); } - const viewByteLength = view.byteLength; - firstDescriptor.buffer = TransferArrayBuffer(view.buffer); - ReadableByteStreamControllerRespondInternal(controller, viewByteLength); } - function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) { - controller._controlledReadableByteStream = stream; - controller._pullAgain = false; - controller._pulling = false; - controller._byobRequest = null; - controller._queue = controller._queueTotalSize = void 0; - ResetQueue(controller); - controller._closeRequested = false; - controller._started = false; - controller._strategyHWM = highWaterMark; - controller._pullAlgorithm = pullAlgorithm; - controller._cancelAlgorithm = cancelAlgorithm; - controller._autoAllocateChunkSize = autoAllocateChunkSize; - controller._pendingPullIntos = new SimpleQueue(); - stream._readableStreamController = controller; - const startResult = startAlgorithm(); - uponPromise(promiseResolvedWith(startResult), () => { - controller._started = true; - ReadableByteStreamControllerCallPullIfNeeded(controller); - return null; - }, (r2) => { - ReadableByteStreamControllerError(controller, r2); - return null; - }); + function isObject2(x2) { + return typeof x2 === "object" && x2 !== null || typeof x2 === "function"; } - function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) { - const controller = Object.create(ReadableByteStreamController.prototype); - let startAlgorithm; - let pullAlgorithm; - let cancelAlgorithm; - if (underlyingByteSource.start !== void 0) { - startAlgorithm = () => underlyingByteSource.start(controller); - } else { - startAlgorithm = () => void 0; - } - if (underlyingByteSource.pull !== void 0) { - pullAlgorithm = () => underlyingByteSource.pull(controller); - } else { - pullAlgorithm = () => promiseResolvedWith(void 0); - } - if (underlyingByteSource.cancel !== void 0) { - cancelAlgorithm = (reason) => underlyingByteSource.cancel(reason); - } else { - cancelAlgorithm = () => 
promiseResolvedWith(void 0); + function assertObject(x2, context2) { + if (!isObject2(x2)) { + throw new TypeError(`${context2} is not an object.`); } - const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize; - if (autoAllocateChunkSize === 0) { - throw new TypeError("autoAllocateChunkSize must be greater than 0"); + } + function assertRequiredArgument(x2, position, context2) { + if (x2 === void 0) { + throw new TypeError(`Parameter ${position} is required in '${context2}'.`); } - SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize); } - function SetUpReadableStreamBYOBRequest(request2, controller, view) { - request2._associatedReadableByteStreamController = controller; - request2._view = view; + function assertRequiredField(x2, field, context2) { + if (x2 === void 0) { + throw new TypeError(`${field} is required in '${context2}'.`); + } } - function byobRequestBrandCheckException(name) { - return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`); + function convertUnrestrictedDouble(value) { + return Number(value); } - function byteStreamControllerBrandCheckException(name) { - return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`); + function censorNegativeZero(x2) { + return x2 === 0 ? 0 : x2; } - function convertReaderOptions(options, context2) { - assertDictionary(options, context2); - const mode = options === null || options === void 0 ? void 0 : options.mode; - return { - mode: mode === void 0 ? void 0 : convertReadableStreamReaderMode(mode, `${context2} has member 'mode' that`) - }; + function integerPart(x2) { + return censorNegativeZero(MathTrunc(x2)); } - function convertReadableStreamReaderMode(mode, context2) { - mode = `${mode}`; - if (mode !== "byob") { - throw new TypeError(`${context2} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`); + function convertUnsignedLongLongWithEnforceRange(value, context2) { + const lowerBound = 0; + const upperBound = Number.MAX_SAFE_INTEGER; + let x2 = Number(value); + x2 = censorNegativeZero(x2); + if (!NumberIsFinite(x2)) { + throw new TypeError(`${context2} is not a finite number`); } - return mode; + x2 = integerPart(x2); + if (x2 < lowerBound || x2 > upperBound) { + throw new TypeError(`${context2} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`); + } + if (!NumberIsFinite(x2) || x2 === 0) { + return 0; + } + return x2; } - function convertByobReadOptions(options, context2) { - var _a3; - assertDictionary(options, context2); - const min = (_a3 = options === null || options === void 0 ? void 0 : options.min) !== null && _a3 !== void 0 ? 
_a3 : 1; - return { - min: convertUnsignedLongLongWithEnforceRange(min, `${context2} has member 'min' that`) - }; + function assertReadableStream(x2, context2) { + if (!IsReadableStream(x2)) { + throw new TypeError(`${context2} is not a ReadableStream.`); + } } - function AcquireReadableStreamBYOBReader(stream) { - return new ReadableStreamBYOBReader(stream); + function AcquireReadableStreamDefaultReader(stream) { + return new ReadableStreamDefaultReader(stream); } - function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) { - stream._reader._readIntoRequests.push(readIntoRequest); + function ReadableStreamAddReadRequest(stream, readRequest) { + stream._reader._readRequests.push(readRequest); } - function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) { + function ReadableStreamFulfillReadRequest(stream, chunk, done) { const reader = stream._reader; - const readIntoRequest = reader._readIntoRequests.shift(); + const readRequest = reader._readRequests.shift(); if (done) { - readIntoRequest._closeSteps(chunk); + readRequest._closeSteps(); } else { - readIntoRequest._chunkSteps(chunk); + readRequest._chunkSteps(chunk); } } - function ReadableStreamGetNumReadIntoRequests(stream) { - return stream._reader._readIntoRequests.length; + function ReadableStreamGetNumReadRequests(stream) { + return stream._reader._readRequests.length; } - function ReadableStreamHasBYOBReader(stream) { + function ReadableStreamHasDefaultReader(stream) { const reader = stream._reader; if (reader === void 0) { return false; } - if (!IsReadableStreamBYOBReader(reader)) { + if (!IsReadableStreamDefaultReader(reader)) { return false; } return true; } - class ReadableStreamBYOBReader { + class ReadableStreamDefaultReader { constructor(stream) { - assertRequiredArgument(stream, 1, "ReadableStreamBYOBReader"); + assertRequiredArgument(stream, 1, "ReadableStreamDefaultReader"); assertReadableStream(stream, "First parameter"); if (IsReadableStreamLocked(stream)) { throw new TypeError("This stream has already been locked for exclusive reading by another reader"); } - if (!IsReadableByteStreamController(stream._readableStreamController)) { - throw new TypeError("Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte source"); - } ReadableStreamReaderGenericInitialize(this, stream); - this._readIntoRequests = new SimpleQueue(); + this._readRequests = new SimpleQueue(); } /** - * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or - * the reader's lock is released before the stream finishes closing. + * Returns a promise that will be fulfilled when the stream becomes closed, + * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing. */ get closed() { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException("closed")); + if (!IsReadableStreamDefaultReader(this)) { + return promiseRejectedWith(defaultReaderBrandCheckException("closed")); } return this._closedPromise; } @@ -45433,46 +45606,22 @@ var require_ponyfill_es2018 = __commonJS({ * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}. 
*/ cancel(reason = void 0) { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException("cancel")); + if (!IsReadableStreamDefaultReader(this)) { + return promiseRejectedWith(defaultReaderBrandCheckException("cancel")); } if (this._ownerReadableStream === void 0) { return promiseRejectedWith(readerLockException("cancel")); } return ReadableStreamReaderGenericCancel(this, reason); } - read(view, rawOptions = {}) { - if (!IsReadableStreamBYOBReader(this)) { - return promiseRejectedWith(byobReaderBrandCheckException("read")); - } - if (!ArrayBuffer.isView(view)) { - return promiseRejectedWith(new TypeError("view must be an array buffer view")); - } - if (view.byteLength === 0) { - return promiseRejectedWith(new TypeError("view must have non-zero byteLength")); - } - if (view.buffer.byteLength === 0) { - return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`)); - } - if (IsDetachedBuffer(view.buffer)) { - return promiseRejectedWith(new TypeError("view's buffer has been detached")); - } - let options; - try { - options = convertByobReadOptions(rawOptions, "options"); - } catch (e2) { - return promiseRejectedWith(e2); - } - const min = options.min; - if (min === 0) { - return promiseRejectedWith(new TypeError("options.min must be greater than 0")); - } - if (!isDataView(view)) { - if (min > view.length) { - return promiseRejectedWith(new RangeError("options.min must be less than or equal to view's length")); - } - } else if (min > view.byteLength) { - return promiseRejectedWith(new RangeError("options.min must be less than or equal to view's byteLength")); + /** + * Returns a promise that allows access to the next chunk from the stream's internal queue, if available. + * + * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source. + */ + read() { + if (!IsReadableStreamDefaultReader(this)) { + return promiseRejectedWith(defaultReaderBrandCheckException("read")); } if (this._ownerReadableStream === void 0) { return promiseRejectedWith(readerLockException("read from")); @@ -45483,12 +45632,12 @@ var require_ponyfill_es2018 = __commonJS({ resolvePromise = resolve; rejectPromise = reject; }); - const readIntoRequest = { + const readRequest = { _chunkSteps: (chunk) => resolvePromise({ value: chunk, done: false }), - _closeSteps: (chunk) => resolvePromise({ value: chunk, done: true }), + _closeSteps: () => resolvePromise({ value: void 0, done: true }), _errorSteps: (e2) => rejectPromise(e2) }; - ReadableStreamBYOBReaderRead(this, view, min, readIntoRequest); + ReadableStreamDefaultReaderRead(this, readRequest); return promise; } /** @@ -45497,3865 +45646,4986 @@ var require_ponyfill_es2018 = __commonJS({ * from now on; otherwise, the reader will appear closed. * * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by - * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to + * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to * do so will throw a `TypeError` and leave the reader locked to the stream. 
*/ releaseLock() { - if (!IsReadableStreamBYOBReader(this)) { - throw byobReaderBrandCheckException("releaseLock"); + if (!IsReadableStreamDefaultReader(this)) { + throw defaultReaderBrandCheckException("releaseLock"); } if (this._ownerReadableStream === void 0) { return; } - ReadableStreamBYOBReaderRelease(this); + ReadableStreamDefaultReaderRelease(this); } } - Object.defineProperties(ReadableStreamBYOBReader.prototype, { + Object.defineProperties(ReadableStreamDefaultReader.prototype, { cancel: { enumerable: true }, read: { enumerable: true }, releaseLock: { enumerable: true }, closed: { enumerable: true } }); - setFunctionName(ReadableStreamBYOBReader.prototype.cancel, "cancel"); - setFunctionName(ReadableStreamBYOBReader.prototype.read, "read"); - setFunctionName(ReadableStreamBYOBReader.prototype.releaseLock, "releaseLock"); + setFunctionName(ReadableStreamDefaultReader.prototype.cancel, "cancel"); + setFunctionName(ReadableStreamDefaultReader.prototype.read, "read"); + setFunctionName(ReadableStreamDefaultReader.prototype.releaseLock, "releaseLock"); if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableStreamBYOBReader.prototype, Symbol.toStringTag, { - value: "ReadableStreamBYOBReader", + Object.defineProperty(ReadableStreamDefaultReader.prototype, Symbol.toStringTag, { + value: "ReadableStreamDefaultReader", configurable: true }); } - function IsReadableStreamBYOBReader(x2) { + function IsReadableStreamDefaultReader(x2) { if (!typeIsObject(x2)) { return false; } - if (!Object.prototype.hasOwnProperty.call(x2, "_readIntoRequests")) { + if (!Object.prototype.hasOwnProperty.call(x2, "_readRequests")) { return false; } - return x2 instanceof ReadableStreamBYOBReader; + return x2 instanceof ReadableStreamDefaultReader; } - function ReadableStreamBYOBReaderRead(reader, view, min, readIntoRequest) { + function ReadableStreamDefaultReaderRead(reader, readRequest) { const stream = reader._ownerReadableStream; stream._disturbed = true; - if (stream._state === "errored") { - readIntoRequest._errorSteps(stream._storedError); + if (stream._state === "closed") { + readRequest._closeSteps(); + } else if (stream._state === "errored") { + readRequest._errorSteps(stream._storedError); } else { - ReadableByteStreamControllerPullInto(stream._readableStreamController, view, min, readIntoRequest); + stream._readableStreamController[PullSteps](readRequest); } } - function ReadableStreamBYOBReaderRelease(reader) { + function ReadableStreamDefaultReaderRelease(reader) { ReadableStreamReaderGenericRelease(reader); const e2 = new TypeError("Reader was released"); - ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2); + ReadableStreamDefaultReaderErrorReadRequests(reader, e2); } - function ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2) { - const readIntoRequests = reader._readIntoRequests; - reader._readIntoRequests = new SimpleQueue(); - readIntoRequests.forEach((readIntoRequest) => { - readIntoRequest._errorSteps(e2); + function ReadableStreamDefaultReaderErrorReadRequests(reader, e2) { + const readRequests = reader._readRequests; + reader._readRequests = new SimpleQueue(); + readRequests.forEach((readRequest) => { + readRequest._errorSteps(e2); }); } - function byobReaderBrandCheckException(name) { - return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`); + function defaultReaderBrandCheckException(name) { + return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a 
ReadableStreamDefaultReader`); } - function ExtractHighWaterMark(strategy, defaultHWM) { - const { highWaterMark } = strategy; - if (highWaterMark === void 0) { - return defaultHWM; + const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { + }).prototype); + class ReadableStreamAsyncIteratorImpl { + constructor(reader, preventCancel) { + this._ongoingPromise = void 0; + this._isFinished = false; + this._reader = reader; + this._preventCancel = preventCancel; } - if (NumberIsNaN(highWaterMark) || highWaterMark < 0) { - throw new RangeError("Invalid highWaterMark"); + next() { + const nextSteps = () => this._nextSteps(); + this._ongoingPromise = this._ongoingPromise ? transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) : nextSteps(); + return this._ongoingPromise; } - return highWaterMark; - } - function ExtractSizeAlgorithm(strategy) { - const { size } = strategy; - if (!size) { - return () => 1; + return(value) { + const returnSteps = () => this._returnSteps(value); + return this._ongoingPromise ? transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) : returnSteps(); + } + _nextSteps() { + if (this._isFinished) { + return Promise.resolve({ value: void 0, done: true }); + } + const reader = this._reader; + let resolvePromise; + let rejectPromise; + const promise = newPromise((resolve, reject) => { + resolvePromise = resolve; + rejectPromise = reject; + }); + const readRequest = { + _chunkSteps: (chunk) => { + this._ongoingPromise = void 0; + _queueMicrotask(() => resolvePromise({ value: chunk, done: false })); + }, + _closeSteps: () => { + this._ongoingPromise = void 0; + this._isFinished = true; + ReadableStreamReaderGenericRelease(reader); + resolvePromise({ value: void 0, done: true }); + }, + _errorSteps: (reason) => { + this._ongoingPromise = void 0; + this._isFinished = true; + ReadableStreamReaderGenericRelease(reader); + rejectPromise(reason); + } + }; + ReadableStreamDefaultReaderRead(reader, readRequest); + return promise; + } + _returnSteps(value) { + if (this._isFinished) { + return Promise.resolve({ value, done: true }); + } + this._isFinished = true; + const reader = this._reader; + if (!this._preventCancel) { + const result = ReadableStreamReaderGenericCancel(reader, value); + ReadableStreamReaderGenericRelease(reader); + return transformPromiseWith(result, () => ({ value, done: true })); + } + ReadableStreamReaderGenericRelease(reader); + return promiseResolvedWith({ value, done: true }); } - return size; - } - function convertQueuingStrategy(init, context2) { - assertDictionary(init, context2); - const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; - const size = init === null || init === void 0 ? void 0 : init.size; - return { - highWaterMark: highWaterMark === void 0 ? void 0 : convertUnrestrictedDouble(highWaterMark), - size: size === void 0 ? void 0 : convertQueuingStrategySize(size, `${context2} has member 'size' that`) - }; - } - function convertQueuingStrategySize(fn, context2) { - assertFunction(fn, context2); - return (chunk) => convertUnrestrictedDouble(fn(chunk)); - } - function convertUnderlyingSink(original, context2) { - assertDictionary(original, context2); - const abort = original === null || original === void 0 ? void 0 : original.abort; - const close = original === null || original === void 0 ? void 0 : original.close; - const start = original === null || original === void 0 ? 
void 0 : original.start; - const type = original === null || original === void 0 ? void 0 : original.type; - const write = original === null || original === void 0 ? void 0 : original.write; - return { - abort: abort === void 0 ? void 0 : convertUnderlyingSinkAbortCallback(abort, original, `${context2} has member 'abort' that`), - close: close === void 0 ? void 0 : convertUnderlyingSinkCloseCallback(close, original, `${context2} has member 'close' that`), - start: start === void 0 ? void 0 : convertUnderlyingSinkStartCallback(start, original, `${context2} has member 'start' that`), - write: write === void 0 ? void 0 : convertUnderlyingSinkWriteCallback(write, original, `${context2} has member 'write' that`), - type - }; - } - function convertUnderlyingSinkAbortCallback(fn, original, context2) { - assertFunction(fn, context2); - return (reason) => promiseCall(fn, original, [reason]); - } - function convertUnderlyingSinkCloseCallback(fn, original, context2) { - assertFunction(fn, context2); - return () => promiseCall(fn, original, []); - } - function convertUnderlyingSinkStartCallback(fn, original, context2) { - assertFunction(fn, context2); - return (controller) => reflectCall(fn, original, [controller]); - } - function convertUnderlyingSinkWriteCallback(fn, original, context2) { - assertFunction(fn, context2); - return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); } - function assertWritableStream(x2, context2) { - if (!IsWritableStream(x2)) { - throw new TypeError(`${context2} is not a WritableStream.`); + const ReadableStreamAsyncIteratorPrototype = { + next() { + if (!IsReadableStreamAsyncIterator(this)) { + return promiseRejectedWith(streamAsyncIteratorBrandCheckException("next")); + } + return this._asyncIteratorImpl.next(); + }, + return(value) { + if (!IsReadableStreamAsyncIterator(this)) { + return promiseRejectedWith(streamAsyncIteratorBrandCheckException("return")); + } + return this._asyncIteratorImpl.return(value); } + }; + Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype); + function AcquireReadableStreamAsyncIterator(stream, preventCancel) { + const reader = AcquireReadableStreamDefaultReader(stream); + const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel); + const iterator3 = Object.create(ReadableStreamAsyncIteratorPrototype); + iterator3._asyncIteratorImpl = impl; + return iterator3; } - function isAbortSignal2(value) { - if (typeof value !== "object" || value === null) { + function IsReadableStreamAsyncIterator(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_asyncIteratorImpl")) { return false; } try { - return typeof value.aborted === "boolean"; + return x2._asyncIteratorImpl instanceof ReadableStreamAsyncIteratorImpl; } catch (_a3) { return false; } } - const supportsAbortController = typeof AbortController === "function"; - function createAbortController() { - if (supportsAbortController) { - return new AbortController(); - } - return void 0; + function streamAsyncIteratorBrandCheckException(name) { + return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`); } - class WritableStream { - constructor(rawUnderlyingSink = {}, rawStrategy = {}) { - if (rawUnderlyingSink === void 0) { - rawUnderlyingSink = null; - } else { - assertObject(rawUnderlyingSink, "First parameter"); - } - const strategy = convertQueuingStrategy(rawStrategy, "Second parameter"); - const underlyingSink = 
convertUnderlyingSink(rawUnderlyingSink, "First parameter"); - InitializeWritableStream(this); - const type = underlyingSink.type; - if (type !== void 0) { - throw new RangeError("Invalid type is specified"); - } - const sizeAlgorithm = ExtractSizeAlgorithm(strategy); - const highWaterMark = ExtractHighWaterMark(strategy, 1); - SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm); + const NumberIsNaN = Number.isNaN || function(x2) { + return x2 !== x2; + }; + var _a2, _b, _c; + function CreateArrayFromList(elements) { + return elements.slice(); + } + function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) { + new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset); + } + let TransferArrayBuffer = (O) => { + if (typeof O.transfer === "function") { + TransferArrayBuffer = (buffer) => buffer.transfer(); + } else if (typeof structuredClone === "function") { + TransferArrayBuffer = (buffer) => structuredClone(buffer, { transfer: [buffer] }); + } else { + TransferArrayBuffer = (buffer) => buffer; } - /** - * Returns whether or not the writable stream is locked to a writer. - */ - get locked() { - if (!IsWritableStream(this)) { - throw streamBrandCheckException$2("locked"); - } - return IsWritableStreamLocked(this); + return TransferArrayBuffer(O); + }; + let IsDetachedBuffer = (O) => { + if (typeof O.detached === "boolean") { + IsDetachedBuffer = (buffer) => buffer.detached; + } else { + IsDetachedBuffer = (buffer) => buffer.byteLength === 0; } - /** - * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be - * immediately moved to an errored state, with any queued-up writes discarded. This will also execute any abort - * mechanism of the underlying sink. - * - * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled - * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel - * the stream) if the stream is currently locked. - */ - abort(reason = void 0) { - if (!IsWritableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$2("abort")); - } - if (IsWritableStreamLocked(this)) { - return promiseRejectedWith(new TypeError("Cannot abort a stream that already has a writer")); - } - return WritableStreamAbort(this, reason); - } - /** - * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its - * close behavior. During this time any further attempts to write will fail (without erroring the stream). - * - * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream - * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with - * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked. - */ - close() { - if (!IsWritableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$2("close")); - } - if (IsWritableStreamLocked(this)) { - return promiseRejectedWith(new TypeError("Cannot close a stream that already has a writer")); - } - if (WritableStreamCloseQueuedOrInFlight(this)) { - return promiseRejectedWith(new TypeError("Cannot close an already-closing stream")); - } - return WritableStreamClose(this); - } - /** - * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. 
While the stream - * is locked, no other writer can be acquired until this one is released. - * - * This functionality is especially useful for creating abstractions that desire the ability to write to a stream - * without interruption or interleaving. By getting a writer for the stream, you can ensure nobody else can write at - * the same time, which would cause the resulting written data to be unpredictable and probably useless. - */ - getWriter() { - if (!IsWritableStream(this)) { - throw streamBrandCheckException$2("getWriter"); - } - return AcquireWritableStreamDefaultWriter(this); + return IsDetachedBuffer(O); + }; + function ArrayBufferSlice(buffer, begin, end) { + if (buffer.slice) { + return buffer.slice(begin, end); } + const length = end - begin; + const slice = new ArrayBuffer(length); + CopyDataBlockBytes(slice, 0, buffer, begin, length); + return slice; } - Object.defineProperties(WritableStream.prototype, { - abort: { enumerable: true }, - close: { enumerable: true }, - getWriter: { enumerable: true }, - locked: { enumerable: true } - }); - setFunctionName(WritableStream.prototype.abort, "abort"); - setFunctionName(WritableStream.prototype.close, "close"); - setFunctionName(WritableStream.prototype.getWriter, "getWriter"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(WritableStream.prototype, Symbol.toStringTag, { - value: "WritableStream", - configurable: true - }); - } - function AcquireWritableStreamDefaultWriter(stream) { - return new WritableStreamDefaultWriter(stream); - } - function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) { - const stream = Object.create(WritableStream.prototype); - InitializeWritableStream(stream); - const controller = Object.create(WritableStreamDefaultController.prototype); - SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); - return stream; - } - function InitializeWritableStream(stream) { - stream._state = "writable"; - stream._storedError = void 0; - stream._writer = void 0; - stream._writableStreamController = void 0; - stream._writeRequests = new SimpleQueue(); - stream._inFlightWriteRequest = void 0; - stream._closeRequest = void 0; - stream._inFlightCloseRequest = void 0; - stream._pendingAbortRequest = void 0; - stream._backpressure = false; - } - function IsWritableStream(x2) { - if (!typeIsObject(x2)) { - return false; + function GetMethod(receiver, prop) { + const func = receiver[prop]; + if (func === void 0 || func === null) { + return void 0; } - if (!Object.prototype.hasOwnProperty.call(x2, "_writableStreamController")) { - return false; + if (typeof func !== "function") { + throw new TypeError(`${String(prop)} is not a function`); } - return x2 instanceof WritableStream; + return func; } - function IsWritableStreamLocked(stream) { - if (stream._writer === void 0) { - return false; - } - return true; + function CreateAsyncFromSyncIterator(syncIteratorRecord) { + const syncIterable = { + [Symbol.iterator]: () => syncIteratorRecord.iterator + }; + const asyncIterator = async function* () { + return yield* syncIterable; + }(); + const nextMethod = asyncIterator.next; + return { iterator: asyncIterator, nextMethod, done: false }; } - function WritableStreamAbort(stream, reason) { - var _a3; - if (stream._state === "closed" || stream._state === "errored") { - return promiseResolvedWith(void 0); - } - 
stream._writableStreamController._abortReason = reason; - (_a3 = stream._writableStreamController._abortController) === null || _a3 === void 0 ? void 0 : _a3.abort(reason); - const state = stream._state; - if (state === "closed" || state === "errored") { - return promiseResolvedWith(void 0); - } - if (stream._pendingAbortRequest !== void 0) { - return stream._pendingAbortRequest._promise; + const SymbolAsyncIterator = (_c = (_a2 = Symbol.asyncIterator) !== null && _a2 !== void 0 ? _a2 : (_b = Symbol.for) === null || _b === void 0 ? void 0 : _b.call(Symbol, "Symbol.asyncIterator")) !== null && _c !== void 0 ? _c : "@@asyncIterator"; + function GetIterator(obj, hint = "sync", method) { + if (method === void 0) { + if (hint === "async") { + method = GetMethod(obj, SymbolAsyncIterator); + if (method === void 0) { + const syncMethod = GetMethod(obj, Symbol.iterator); + const syncIteratorRecord = GetIterator(obj, "sync", syncMethod); + return CreateAsyncFromSyncIterator(syncIteratorRecord); + } + } else { + method = GetMethod(obj, Symbol.iterator); + } } - let wasAlreadyErroring = false; - if (state === "erroring") { - wasAlreadyErroring = true; - reason = void 0; + if (method === void 0) { + throw new TypeError("The object is not iterable"); } - const promise = newPromise((resolve, reject) => { - stream._pendingAbortRequest = { - _promise: void 0, - _resolve: resolve, - _reject: reject, - _reason: reason, - _wasAlreadyErroring: wasAlreadyErroring - }; - }); - stream._pendingAbortRequest._promise = promise; - if (!wasAlreadyErroring) { - WritableStreamStartErroring(stream, reason); + const iterator3 = reflectCall(method, obj, []); + if (!typeIsObject(iterator3)) { + throw new TypeError("The iterator method must return an object"); } - return promise; + const nextMethod = iterator3.next; + return { iterator: iterator3, nextMethod, done: false }; } - function WritableStreamClose(stream) { - const state = stream._state; - if (state === "closed" || state === "errored") { - return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`)); - } - const promise = newPromise((resolve, reject) => { - const closeRequest = { - _resolve: resolve, - _reject: reject - }; - stream._closeRequest = closeRequest; - }); - const writer = stream._writer; - if (writer !== void 0 && stream._backpressure && state === "writable") { - defaultWriterReadyPromiseResolve(writer); + function IteratorNext(iteratorRecord) { + const result = reflectCall(iteratorRecord.nextMethod, iteratorRecord.iterator, []); + if (!typeIsObject(result)) { + throw new TypeError("The iterator.next() method must return an object"); } - WritableStreamDefaultControllerClose(stream._writableStreamController); - return promise; + return result; } - function WritableStreamAddWriteRequest(stream) { - const promise = newPromise((resolve, reject) => { - const writeRequest = { - _resolve: resolve, - _reject: reject - }; - stream._writeRequests.push(writeRequest); - }); - return promise; + function IteratorComplete(iterResult) { + return Boolean(iterResult.done); } - function WritableStreamDealWithRejection(stream, error) { - const state = stream._state; - if (state === "writable") { - WritableStreamStartErroring(stream, error); - return; - } - WritableStreamFinishErroring(stream); + function IteratorValue(iterResult) { + return iterResult.value; } - function WritableStreamStartErroring(stream, reason) { - const controller = stream._writableStreamController; - stream._state = "erroring"; - 
stream._storedError = reason; - const writer = stream._writer; - if (writer !== void 0) { - WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); - } - if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) { - WritableStreamFinishErroring(stream); + function IsNonNegativeNumber(v) { + if (typeof v !== "number") { + return false; } - } - function WritableStreamFinishErroring(stream) { - stream._state = "errored"; - stream._writableStreamController[ErrorSteps](); - const storedError = stream._storedError; - stream._writeRequests.forEach((writeRequest) => { - writeRequest._reject(storedError); - }); - stream._writeRequests = new SimpleQueue(); - if (stream._pendingAbortRequest === void 0) { - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; + if (NumberIsNaN(v)) { + return false; } - const abortRequest = stream._pendingAbortRequest; - stream._pendingAbortRequest = void 0; - if (abortRequest._wasAlreadyErroring) { - abortRequest._reject(storedError); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return; + if (v < 0) { + return false; } - const promise = stream._writableStreamController[AbortSteps](abortRequest._reason); - uponPromise(promise, () => { - abortRequest._resolve(); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return null; - }, (reason) => { - abortRequest._reject(reason); - WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); - return null; - }); - } - function WritableStreamFinishInFlightWrite(stream) { - stream._inFlightWriteRequest._resolve(void 0); - stream._inFlightWriteRequest = void 0; + return true; } - function WritableStreamFinishInFlightWriteWithError(stream, error) { - stream._inFlightWriteRequest._reject(error); - stream._inFlightWriteRequest = void 0; - WritableStreamDealWithRejection(stream, error); + function CloneAsUint8Array(O) { + const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength); + return new Uint8Array(buffer); } - function WritableStreamFinishInFlightClose(stream) { - stream._inFlightCloseRequest._resolve(void 0); - stream._inFlightCloseRequest = void 0; - const state = stream._state; - if (state === "erroring") { - stream._storedError = void 0; - if (stream._pendingAbortRequest !== void 0) { - stream._pendingAbortRequest._resolve(); - stream._pendingAbortRequest = void 0; - } - } - stream._state = "closed"; - const writer = stream._writer; - if (writer !== void 0) { - defaultWriterClosedPromiseResolve(writer); + function DequeueValue(container) { + const pair = container._queue.shift(); + container._queueTotalSize -= pair.size; + if (container._queueTotalSize < 0) { + container._queueTotalSize = 0; } + return pair.value; } - function WritableStreamFinishInFlightCloseWithError(stream, error) { - stream._inFlightCloseRequest._reject(error); - stream._inFlightCloseRequest = void 0; - if (stream._pendingAbortRequest !== void 0) { - stream._pendingAbortRequest._reject(error); - stream._pendingAbortRequest = void 0; + function EnqueueValueWithSize(container, value, size) { + if (!IsNonNegativeNumber(size) || size === Infinity) { + throw new RangeError("Size must be a finite, non-NaN, non-negative number."); } - WritableStreamDealWithRejection(stream, error); + container._queue.push({ value, size }); + container._queueTotalSize += size; } - function WritableStreamCloseQueuedOrInFlight(stream) { - if (stream._closeRequest === void 0 && stream._inFlightCloseRequest === void 0) { - return false; - } - return true; + function 
PeekQueueValue(container) { + const pair = container._queue.peek(); + return pair.value; } - function WritableStreamHasOperationMarkedInFlight(stream) { - if (stream._inFlightWriteRequest === void 0 && stream._inFlightCloseRequest === void 0) { - return false; - } - return true; + function ResetQueue(container) { + container._queue = new SimpleQueue(); + container._queueTotalSize = 0; } - function WritableStreamMarkCloseRequestInFlight(stream) { - stream._inFlightCloseRequest = stream._closeRequest; - stream._closeRequest = void 0; + function isDataViewConstructor(ctor) { + return ctor === DataView; } - function WritableStreamMarkFirstWriteRequestInFlight(stream) { - stream._inFlightWriteRequest = stream._writeRequests.shift(); + function isDataView(view) { + return isDataViewConstructor(view.constructor); } - function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { - if (stream._closeRequest !== void 0) { - stream._closeRequest._reject(stream._storedError); - stream._closeRequest = void 0; - } - const writer = stream._writer; - if (writer !== void 0) { - defaultWriterClosedPromiseReject(writer, stream._storedError); + function arrayBufferViewElementSize(ctor) { + if (isDataViewConstructor(ctor)) { + return 1; } + return ctor.BYTES_PER_ELEMENT; } - function WritableStreamUpdateBackpressure(stream, backpressure) { - const writer = stream._writer; - if (writer !== void 0 && backpressure !== stream._backpressure) { - if (backpressure) { - defaultWriterReadyPromiseReset(writer); - } else { - defaultWriterReadyPromiseResolve(writer); + class ReadableStreamBYOBRequest { + constructor() { + throw new TypeError("Illegal constructor"); + } + /** + * Returns the view for writing in to, or `null` if the BYOB request has already been responded to. + */ + get view() { + if (!IsReadableStreamBYOBRequest(this)) { + throw byobRequestBrandCheckException("view"); } + return this._view; } - stream._backpressure = backpressure; - } - class WritableStreamDefaultWriter { - constructor(stream) { - assertRequiredArgument(stream, 1, "WritableStreamDefaultWriter"); - assertWritableStream(stream, "First parameter"); - if (IsWritableStreamLocked(stream)) { - throw new TypeError("This stream has already been locked for exclusive writing by another writer"); + respond(bytesWritten) { + if (!IsReadableStreamBYOBRequest(this)) { + throw byobRequestBrandCheckException("respond"); } - this._ownerWritableStream = stream; - stream._writer = this; - const state = stream._state; - if (state === "writable") { - if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) { - defaultWriterReadyPromiseInitialize(this); - } else { - defaultWriterReadyPromiseInitializeAsResolved(this); - } - defaultWriterClosedPromiseInitialize(this); - } else if (state === "erroring") { - defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError); - defaultWriterClosedPromiseInitialize(this); - } else if (state === "closed") { - defaultWriterReadyPromiseInitializeAsResolved(this); - defaultWriterClosedPromiseInitializeAsResolved(this); - } else { - const storedError = stream._storedError; - defaultWriterReadyPromiseInitializeAsRejected(this, storedError); - defaultWriterClosedPromiseInitializeAsRejected(this, storedError); + assertRequiredArgument(bytesWritten, 1, "respond"); + bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, "First parameter"); + if (this._associatedReadableByteStreamController === void 0) { + throw new TypeError("This BYOB request has been invalidated"); } - } - /** - * 
Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or - * the writer’s lock is released before the stream finishes closing. - */ - get closed() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException("closed")); + if (IsDetachedBuffer(this._view.buffer)) { + throw new TypeError(`The BYOB request's buffer has been detached and so cannot be used as a response`); } - return this._closedPromise; + ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten); } - /** - * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full. - * A producer can use this information to determine the right amount of data to write. - * - * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort - * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when - * the writer’s lock is released. - */ - get desiredSize() { - if (!IsWritableStreamDefaultWriter(this)) { - throw defaultWriterBrandCheckException("desiredSize"); + respondWithNewView(view) { + if (!IsReadableStreamBYOBRequest(this)) { + throw byobRequestBrandCheckException("respondWithNewView"); } - if (this._ownerWritableStream === void 0) { - throw defaultWriterLockException("desiredSize"); + assertRequiredArgument(view, 1, "respondWithNewView"); + if (!ArrayBuffer.isView(view)) { + throw new TypeError("You can only respond with array buffer views"); } - return WritableStreamDefaultWriterGetDesiredSize(this); + if (this._associatedReadableByteStreamController === void 0) { + throw new TypeError("This BYOB request has been invalidated"); + } + if (IsDetachedBuffer(view.buffer)) { + throw new TypeError("The given view's buffer has been detached and so cannot be used as a response"); + } + ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view); + } + } + Object.defineProperties(ReadableStreamBYOBRequest.prototype, { + respond: { enumerable: true }, + respondWithNewView: { enumerable: true }, + view: { enumerable: true } + }); + setFunctionName(ReadableStreamBYOBRequest.prototype.respond, "respond"); + setFunctionName(ReadableStreamBYOBRequest.prototype.respondWithNewView, "respondWithNewView"); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(ReadableStreamBYOBRequest.prototype, Symbol.toStringTag, { + value: "ReadableStreamBYOBRequest", + configurable: true + }); + } + class ReadableByteStreamController { + constructor() { + throw new TypeError("Illegal constructor"); } /** - * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions - * from non-positive to positive, signaling that it is no longer applying backpressure. Once the desired size dips - * back to zero or below, the getter will return a new promise that stays pending until the next transition. - * - * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become - * rejected. + * Returns the current BYOB pull request, or `null` if there isn't one. 
*/ - get ready() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException("ready")); + get byobRequest() { + if (!IsReadableByteStreamController(this)) { + throw byteStreamControllerBrandCheckException("byobRequest"); } - return this._readyPromise; + return ReadableByteStreamControllerGetBYOBRequest(this); } /** - * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}. + * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is + * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure. */ - abort(reason = void 0) { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException("abort")); - } - if (this._ownerWritableStream === void 0) { - return promiseRejectedWith(defaultWriterLockException("abort")); + get desiredSize() { + if (!IsReadableByteStreamController(this)) { + throw byteStreamControllerBrandCheckException("desiredSize"); } - return WritableStreamDefaultWriterAbort(this, reason); + return ReadableByteStreamControllerGetDesiredSize(this); } /** - * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}. + * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from + * the stream, but once those are read, the stream will become closed. */ close() { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException("close")); + if (!IsReadableByteStreamController(this)) { + throw byteStreamControllerBrandCheckException("close"); } - const stream = this._ownerWritableStream; - if (stream === void 0) { - return promiseRejectedWith(defaultWriterLockException("close")); + if (this._closeRequested) { + throw new TypeError("The stream has already been closed; do not close it again!"); } - if (WritableStreamCloseQueuedOrInFlight(stream)) { - return promiseRejectedWith(new TypeError("Cannot close an already-closing stream")); + const state = this._controlledReadableByteStream._state; + if (state !== "readable") { + throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`); } - return WritableStreamDefaultWriterClose(this); + ReadableByteStreamControllerClose(this); } - /** - * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active. - * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from - * now on; otherwise, the writer will appear closed. - * - * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. even if the - * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled). - * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents - * other producers from writing in an interleaved manner. 
- */ - releaseLock() { - if (!IsWritableStreamDefaultWriter(this)) { - throw defaultWriterBrandCheckException("releaseLock"); - } - const stream = this._ownerWritableStream; - if (stream === void 0) { - return; + enqueue(chunk) { + if (!IsReadableByteStreamController(this)) { + throw byteStreamControllerBrandCheckException("enqueue"); } - WritableStreamDefaultWriterRelease(this); - } - write(chunk = void 0) { - if (!IsWritableStreamDefaultWriter(this)) { - return promiseRejectedWith(defaultWriterBrandCheckException("write")); + assertRequiredArgument(chunk, 1, "enqueue"); + if (!ArrayBuffer.isView(chunk)) { + throw new TypeError("chunk must be an array buffer view"); } - if (this._ownerWritableStream === void 0) { - return promiseRejectedWith(defaultWriterLockException("write to")); + if (chunk.byteLength === 0) { + throw new TypeError("chunk must have non-zero byteLength"); } - return WritableStreamDefaultWriterWrite(this, chunk); - } - } - Object.defineProperties(WritableStreamDefaultWriter.prototype, { - abort: { enumerable: true }, - close: { enumerable: true }, - releaseLock: { enumerable: true }, - write: { enumerable: true }, - closed: { enumerable: true }, - desiredSize: { enumerable: true }, - ready: { enumerable: true } - }); - setFunctionName(WritableStreamDefaultWriter.prototype.abort, "abort"); - setFunctionName(WritableStreamDefaultWriter.prototype.close, "close"); - setFunctionName(WritableStreamDefaultWriter.prototype.releaseLock, "releaseLock"); - setFunctionName(WritableStreamDefaultWriter.prototype.write, "write"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(WritableStreamDefaultWriter.prototype, Symbol.toStringTag, { - value: "WritableStreamDefaultWriter", - configurable: true - }); - } - function IsWritableStreamDefaultWriter(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_ownerWritableStream")) { - return false; - } - return x2 instanceof WritableStreamDefaultWriter; - } - function WritableStreamDefaultWriterAbort(writer, reason) { - const stream = writer._ownerWritableStream; - return WritableStreamAbort(stream, reason); - } - function WritableStreamDefaultWriterClose(writer) { - const stream = writer._ownerWritableStream; - return WritableStreamClose(stream); - } - function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) { - const stream = writer._ownerWritableStream; - const state = stream._state; - if (WritableStreamCloseQueuedOrInFlight(stream) || state === "closed") { - return promiseResolvedWith(void 0); - } - if (state === "errored") { - return promiseRejectedWith(stream._storedError); - } - return WritableStreamDefaultWriterClose(writer); - } - function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { - if (writer._closedPromiseState === "pending") { - defaultWriterClosedPromiseReject(writer, error); - } else { - defaultWriterClosedPromiseResetToRejected(writer, error); - } - } - function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { - if (writer._readyPromiseState === "pending") { - defaultWriterReadyPromiseReject(writer, error); - } else { - defaultWriterReadyPromiseResetToRejected(writer, error); - } - } - function WritableStreamDefaultWriterGetDesiredSize(writer) { - const stream = writer._ownerWritableStream; - const state = stream._state; - if (state === "errored" || state === "erroring") { - return null; - } - if (state === "closed") { - return 0; - } - return 
WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController); - } - function WritableStreamDefaultWriterRelease(writer) { - const stream = writer._ownerWritableStream; - const releasedError = new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`); - WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); - WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); - stream._writer = void 0; - writer._ownerWritableStream = void 0; - } - function WritableStreamDefaultWriterWrite(writer, chunk) { - const stream = writer._ownerWritableStream; - const controller = stream._writableStreamController; - const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk); - if (stream !== writer._ownerWritableStream) { - return promiseRejectedWith(defaultWriterLockException("write to")); - } - const state = stream._state; - if (state === "errored") { - return promiseRejectedWith(stream._storedError); - } - if (WritableStreamCloseQueuedOrInFlight(stream) || state === "closed") { - return promiseRejectedWith(new TypeError("The stream is closing or closed and cannot be written to")); - } - if (state === "erroring") { - return promiseRejectedWith(stream._storedError); - } - const promise = WritableStreamAddWriteRequest(stream); - WritableStreamDefaultControllerWrite(controller, chunk, chunkSize); - return promise; - } - const closeSentinel = {}; - class WritableStreamDefaultController { - constructor() { - throw new TypeError("Illegal constructor"); - } - /** - * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted. - * - * @deprecated - * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177. - * Use {@link WritableStreamDefaultController.signal}'s `reason` instead. - */ - get abortReason() { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2("abortReason"); + if (chunk.buffer.byteLength === 0) { + throw new TypeError(`chunk's buffer must have non-zero byteLength`); } - return this._abortReason; - } - /** - * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted. - */ - get signal() { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2("signal"); + if (this._closeRequested) { + throw new TypeError("stream is closed or draining"); } - if (this._abortController === void 0) { - throw new TypeError("WritableStreamDefaultController.prototype.signal is not supported"); + const state = this._controlledReadableByteStream._state; + if (state !== "readable") { + throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`); } - return this._abortController.signal; + ReadableByteStreamControllerEnqueue(this, chunk); } /** - * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`. - * - * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying - * sink's methods. However, it can be useful for suddenly shutting down a stream in response to an event outside the - * normal lifecycle of interactions with the underlying sink. + * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. 
*/ error(e2 = void 0) { - if (!IsWritableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$2("error"); - } - const state = this._controlledWritableStream._state; - if (state !== "writable") { - return; + if (!IsReadableByteStreamController(this)) { + throw byteStreamControllerBrandCheckException("error"); } - WritableStreamDefaultControllerError(this, e2); + ReadableByteStreamControllerError(this, e2); } /** @internal */ - [AbortSteps](reason) { - const result = this._abortAlgorithm(reason); - WritableStreamDefaultControllerClearAlgorithms(this); + [CancelSteps](reason) { + ReadableByteStreamControllerClearPendingPullIntos(this); + ResetQueue(this); + const result = this._cancelAlgorithm(reason); + ReadableByteStreamControllerClearAlgorithms(this); return result; } /** @internal */ - [ErrorSteps]() { - ResetQueue(this); + [PullSteps](readRequest) { + const stream = this._controlledReadableByteStream; + if (this._queueTotalSize > 0) { + ReadableByteStreamControllerFillReadRequestFromQueue(this, readRequest); + return; + } + const autoAllocateChunkSize = this._autoAllocateChunkSize; + if (autoAllocateChunkSize !== void 0) { + let buffer; + try { + buffer = new ArrayBuffer(autoAllocateChunkSize); + } catch (bufferE) { + readRequest._errorSteps(bufferE); + return; + } + const pullIntoDescriptor = { + buffer, + bufferByteLength: autoAllocateChunkSize, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + minimumFill: 1, + elementSize: 1, + viewConstructor: Uint8Array, + readerType: "default" + }; + this._pendingPullIntos.push(pullIntoDescriptor); + } + ReadableStreamAddReadRequest(stream, readRequest); + ReadableByteStreamControllerCallPullIfNeeded(this); + } + /** @internal */ + [ReleaseSteps]() { + if (this._pendingPullIntos.length > 0) { + const firstPullInto = this._pendingPullIntos.peek(); + firstPullInto.readerType = "none"; + this._pendingPullIntos = new SimpleQueue(); + this._pendingPullIntos.push(firstPullInto); + } } } - Object.defineProperties(WritableStreamDefaultController.prototype, { - abortReason: { enumerable: true }, - signal: { enumerable: true }, - error: { enumerable: true } + Object.defineProperties(ReadableByteStreamController.prototype, { + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + byobRequest: { enumerable: true }, + desiredSize: { enumerable: true } }); + setFunctionName(ReadableByteStreamController.prototype.close, "close"); + setFunctionName(ReadableByteStreamController.prototype.enqueue, "enqueue"); + setFunctionName(ReadableByteStreamController.prototype.error, "error"); if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(WritableStreamDefaultController.prototype, Symbol.toStringTag, { - value: "WritableStreamDefaultController", + Object.defineProperty(ReadableByteStreamController.prototype, Symbol.toStringTag, { + value: "ReadableByteStreamController", configurable: true }); } - function IsWritableStreamDefaultController(x2) { + function IsReadableByteStreamController(x2) { if (!typeIsObject(x2)) { return false; } - if (!Object.prototype.hasOwnProperty.call(x2, "_controlledWritableStream")) { + if (!Object.prototype.hasOwnProperty.call(x2, "_controlledReadableByteStream")) { return false; } - return x2 instanceof WritableStreamDefaultController; + return x2 instanceof ReadableByteStreamController; } - function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, 
sizeAlgorithm) { - controller._controlledWritableStream = stream; - stream._writableStreamController = controller; - controller._queue = void 0; - controller._queueTotalSize = void 0; - ResetQueue(controller); - controller._abortReason = void 0; - controller._abortController = createAbortController(); - controller._started = false; - controller._strategySizeAlgorithm = sizeAlgorithm; - controller._strategyHWM = highWaterMark; - controller._writeAlgorithm = writeAlgorithm; - controller._closeAlgorithm = closeAlgorithm; - controller._abortAlgorithm = abortAlgorithm; - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); - const startResult = startAlgorithm(); - const startPromise = promiseResolvedWith(startResult); - uponPromise(startPromise, () => { - controller._started = true; - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + function IsReadableStreamBYOBRequest(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_associatedReadableByteStreamController")) { + return false; + } + return x2 instanceof ReadableStreamBYOBRequest; + } + function ReadableByteStreamControllerCallPullIfNeeded(controller) { + const shouldPull = ReadableByteStreamControllerShouldCallPull(controller); + if (!shouldPull) { + return; + } + if (controller._pulling) { + controller._pullAgain = true; + return; + } + controller._pulling = true; + const pullPromise = controller._pullAlgorithm(); + uponPromise(pullPromise, () => { + controller._pulling = false; + if (controller._pullAgain) { + controller._pullAgain = false; + ReadableByteStreamControllerCallPullIfNeeded(controller); + } return null; - }, (r2) => { - controller._started = true; - WritableStreamDealWithRejection(stream, r2); + }, (e2) => { + ReadableByteStreamControllerError(controller, e2); return null; }); } - function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) { - const controller = Object.create(WritableStreamDefaultController.prototype); - let startAlgorithm; - let writeAlgorithm; - let closeAlgorithm; - let abortAlgorithm; - if (underlyingSink.start !== void 0) { - startAlgorithm = () => underlyingSink.start(controller); - } else { - startAlgorithm = () => void 0; - } - if (underlyingSink.write !== void 0) { - writeAlgorithm = (chunk) => underlyingSink.write(chunk, controller); - } else { - writeAlgorithm = () => promiseResolvedWith(void 0); - } - if (underlyingSink.close !== void 0) { - closeAlgorithm = () => underlyingSink.close(); - } else { - closeAlgorithm = () => promiseResolvedWith(void 0); + function ReadableByteStreamControllerClearPendingPullIntos(controller) { + ReadableByteStreamControllerInvalidateBYOBRequest(controller); + controller._pendingPullIntos = new SimpleQueue(); + } + function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) { + let done = false; + if (stream._state === "closed") { + done = true; } - if (underlyingSink.abort !== void 0) { - abortAlgorithm = (reason) => underlyingSink.abort(reason); + const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); + if (pullIntoDescriptor.readerType === "default") { + ReadableStreamFulfillReadRequest(stream, filledView, done); } else { - abortAlgorithm = () => promiseResolvedWith(void 0); + ReadableStreamFulfillReadIntoRequest(stream, filledView, done); } - SetUpWritableStreamDefaultController(stream, 
controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); } - function WritableStreamDefaultControllerClearAlgorithms(controller) { - controller._writeAlgorithm = void 0; - controller._closeAlgorithm = void 0; - controller._abortAlgorithm = void 0; - controller._strategySizeAlgorithm = void 0; + function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) { + const bytesFilled = pullIntoDescriptor.bytesFilled; + const elementSize = pullIntoDescriptor.elementSize; + return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize); } - function WritableStreamDefaultControllerClose(controller) { - EnqueueValueWithSize(controller, closeSentinel, 0); - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) { + controller._queue.push({ buffer, byteOffset, byteLength }); + controller._queueTotalSize += byteLength; } - function WritableStreamDefaultControllerGetChunkSize(controller, chunk) { + function ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, buffer, byteOffset, byteLength) { + let clonedChunk; try { - return controller._strategySizeAlgorithm(chunk); - } catch (chunkSizeE) { - WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE); - return 1; + clonedChunk = ArrayBufferSlice(buffer, byteOffset, byteOffset + byteLength); + } catch (cloneE) { + ReadableByteStreamControllerError(controller, cloneE); + throw cloneE; } + ReadableByteStreamControllerEnqueueChunkToQueue(controller, clonedChunk, 0, byteLength); } - function WritableStreamDefaultControllerGetDesiredSize(controller) { - return controller._strategyHWM - controller._queueTotalSize; + function ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, firstDescriptor) { + if (firstDescriptor.bytesFilled > 0) { + ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, firstDescriptor.buffer, firstDescriptor.byteOffset, firstDescriptor.bytesFilled); + } + ReadableByteStreamControllerShiftPendingPullInto(controller); } - function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) { - try { - EnqueueValueWithSize(controller, chunk, chunkSize); - } catch (enqueueE) { - WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE); - return; + function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) { + const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled); + const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy; + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + const remainderBytes = maxBytesFilled % pullIntoDescriptor.elementSize; + const maxAlignedBytes = maxBytesFilled - remainderBytes; + if (maxAlignedBytes >= pullIntoDescriptor.minimumFill) { + totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled; + ready = true; } - const stream = controller._controlledWritableStream; - if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === "writable") { - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); + const queue = controller._queue; + while (totalBytesToCopyRemaining > 0) { + const headOfQueue = queue.peek(); + const bytesToCopy = 
Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength); + const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; + CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy); + if (headOfQueue.byteLength === bytesToCopy) { + queue.shift(); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller._queueTotalSize -= bytesToCopy; + ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor); + totalBytesToCopyRemaining -= bytesToCopy; } - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + return ready; } - function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { - const stream = controller._controlledWritableStream; - if (!controller._started) { + function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) { + pullIntoDescriptor.bytesFilled += size; + } + function ReadableByteStreamControllerHandleQueueDrain(controller) { + if (controller._queueTotalSize === 0 && controller._closeRequested) { + ReadableByteStreamControllerClearAlgorithms(controller); + ReadableStreamClose(controller._controlledReadableByteStream); + } else { + ReadableByteStreamControllerCallPullIfNeeded(controller); + } + } + function ReadableByteStreamControllerInvalidateBYOBRequest(controller) { + if (controller._byobRequest === null) { return; } - if (stream._inFlightWriteRequest !== void 0) { + controller._byobRequest._associatedReadableByteStreamController = void 0; + controller._byobRequest._view = null; + controller._byobRequest = null; + } + function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) { + while (controller._pendingPullIntos.length > 0) { + if (controller._queueTotalSize === 0) { + return; + } + const pullIntoDescriptor = controller._pendingPullIntos.peek(); + if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { + ReadableByteStreamControllerShiftPendingPullInto(controller); + ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); + } + } + } + function ReadableByteStreamControllerProcessReadRequestsUsingQueue(controller) { + const reader = controller._controlledReadableByteStream._reader; + while (reader._readRequests.length > 0) { + if (controller._queueTotalSize === 0) { + return; + } + const readRequest = reader._readRequests.shift(); + ReadableByteStreamControllerFillReadRequestFromQueue(controller, readRequest); + } + } + function ReadableByteStreamControllerPullInto(controller, view, min, readIntoRequest) { + const stream = controller._controlledReadableByteStream; + const ctor = view.constructor; + const elementSize = arrayBufferViewElementSize(ctor); + const { byteOffset, byteLength } = view; + const minimumFill = min * elementSize; + let buffer; + try { + buffer = TransferArrayBuffer(view.buffer); + } catch (e2) { + readIntoRequest._errorSteps(e2); return; } - const state = stream._state; - if (state === "erroring") { - WritableStreamFinishErroring(stream); + const pullIntoDescriptor = { + buffer, + bufferByteLength: buffer.byteLength, + byteOffset, + byteLength, + bytesFilled: 0, + minimumFill, + elementSize, + viewConstructor: ctor, + readerType: "byob" + }; + if (controller._pendingPullIntos.length > 0) { + controller._pendingPullIntos.push(pullIntoDescriptor); + ReadableStreamAddReadIntoRequest(stream, 
readIntoRequest); return; } - if (controller._queue.length === 0) { + if (stream._state === "closed") { + const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0); + readIntoRequest._closeSteps(emptyView); return; } - const value = PeekQueueValue(controller); - if (value === closeSentinel) { - WritableStreamDefaultControllerProcessClose(controller); - } else { - WritableStreamDefaultControllerProcessWrite(controller, value); + if (controller._queueTotalSize > 0) { + if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) { + const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor); + ReadableByteStreamControllerHandleQueueDrain(controller); + readIntoRequest._chunkSteps(filledView); + return; + } + if (controller._closeRequested) { + const e2 = new TypeError("Insufficient bytes to fill elements in the given buffer"); + ReadableByteStreamControllerError(controller, e2); + readIntoRequest._errorSteps(e2); + return; + } } + controller._pendingPullIntos.push(pullIntoDescriptor); + ReadableStreamAddReadIntoRequest(stream, readIntoRequest); + ReadableByteStreamControllerCallPullIfNeeded(controller); } - function WritableStreamDefaultControllerErrorIfNeeded(controller, error) { - if (controller._controlledWritableStream._state === "writable") { - WritableStreamDefaultControllerError(controller, error); + function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) { + if (firstDescriptor.readerType === "none") { + ReadableByteStreamControllerShiftPendingPullInto(controller); + } + const stream = controller._controlledReadableByteStream; + if (ReadableStreamHasBYOBReader(stream)) { + while (ReadableStreamGetNumReadIntoRequests(stream) > 0) { + const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller); + ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor); + } } } - function WritableStreamDefaultControllerProcessClose(controller) { - const stream = controller._controlledWritableStream; - WritableStreamMarkCloseRequestInFlight(stream); - DequeueValue(controller); - const sinkClosePromise = controller._closeAlgorithm(); - WritableStreamDefaultControllerClearAlgorithms(controller); - uponPromise(sinkClosePromise, () => { - WritableStreamFinishInFlightClose(stream); - return null; - }, (reason) => { - WritableStreamFinishInFlightCloseWithError(stream, reason); - return null; - }); - } - function WritableStreamDefaultControllerProcessWrite(controller, chunk) { - const stream = controller._controlledWritableStream; - WritableStreamMarkFirstWriteRequestInFlight(stream); - const sinkWritePromise = controller._writeAlgorithm(chunk); - uponPromise(sinkWritePromise, () => { - WritableStreamFinishInFlightWrite(stream); - const state = stream._state; - DequeueValue(controller); - if (!WritableStreamCloseQueuedOrInFlight(stream) && state === "writable") { - const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); - WritableStreamUpdateBackpressure(stream, backpressure); - } - WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); - return null; - }, (reason) => { - if (stream._state === "writable") { - WritableStreamDefaultControllerClearAlgorithms(controller); - } - WritableStreamFinishInFlightWriteWithError(stream, reason); - return null; - }); + function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) { + 
ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor); + if (pullIntoDescriptor.readerType === "none") { + ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, pullIntoDescriptor); + ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); + return; + } + if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.minimumFill) { + return; + } + ReadableByteStreamControllerShiftPendingPullInto(controller); + const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize; + if (remainderSize > 0) { + const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled; + ReadableByteStreamControllerEnqueueClonedChunkToQueue(controller, pullIntoDescriptor.buffer, end - remainderSize, remainderSize); + } + pullIntoDescriptor.bytesFilled -= remainderSize; + ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor); + ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); } - function WritableStreamDefaultControllerGetBackpressure(controller) { - const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller); - return desiredSize <= 0; + function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) { + const firstDescriptor = controller._pendingPullIntos.peek(); + ReadableByteStreamControllerInvalidateBYOBRequest(controller); + const state = controller._controlledReadableByteStream._state; + if (state === "closed") { + ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor); + } else { + ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor); + } + ReadableByteStreamControllerCallPullIfNeeded(controller); } - function WritableStreamDefaultControllerError(controller, error) { - const stream = controller._controlledWritableStream; - WritableStreamDefaultControllerClearAlgorithms(controller); - WritableStreamStartErroring(stream, error); + function ReadableByteStreamControllerShiftPendingPullInto(controller) { + const descriptor = controller._pendingPullIntos.shift(); + return descriptor; } - function streamBrandCheckException$2(name) { - return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`); + function ReadableByteStreamControllerShouldCallPull(controller) { + const stream = controller._controlledReadableByteStream; + if (stream._state !== "readable") { + return false; + } + if (controller._closeRequested) { + return false; + } + if (!controller._started) { + return false; + } + if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { + return true; + } + if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) { + return true; + } + const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller); + if (desiredSize > 0) { + return true; + } + return false; } - function defaultControllerBrandCheckException$2(name) { - return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`); + function ReadableByteStreamControllerClearAlgorithms(controller) { + controller._pullAlgorithm = void 0; + controller._cancelAlgorithm = void 0; } - function defaultWriterBrandCheckException(name) { - return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`); + function 
ReadableByteStreamControllerClose(controller) { + const stream = controller._controlledReadableByteStream; + if (controller._closeRequested || stream._state !== "readable") { + return; + } + if (controller._queueTotalSize > 0) { + controller._closeRequested = true; + return; + } + if (controller._pendingPullIntos.length > 0) { + const firstPendingPullInto = controller._pendingPullIntos.peek(); + if (firstPendingPullInto.bytesFilled % firstPendingPullInto.elementSize !== 0) { + const e2 = new TypeError("Insufficient bytes to fill elements in the given buffer"); + ReadableByteStreamControllerError(controller, e2); + throw e2; + } + } + ReadableByteStreamControllerClearAlgorithms(controller); + ReadableStreamClose(stream); } - function defaultWriterLockException(name) { - return new TypeError("Cannot " + name + " a stream using a released writer"); + function ReadableByteStreamControllerEnqueue(controller, chunk) { + const stream = controller._controlledReadableByteStream; + if (controller._closeRequested || stream._state !== "readable") { + return; + } + const { buffer, byteOffset, byteLength } = chunk; + if (IsDetachedBuffer(buffer)) { + throw new TypeError("chunk's buffer is detached and so cannot be enqueued"); + } + const transferredBuffer = TransferArrayBuffer(buffer); + if (controller._pendingPullIntos.length > 0) { + const firstPendingPullInto = controller._pendingPullIntos.peek(); + if (IsDetachedBuffer(firstPendingPullInto.buffer)) { + throw new TypeError("The BYOB request's buffer has been detached and so cannot be filled with an enqueued chunk"); + } + ReadableByteStreamControllerInvalidateBYOBRequest(controller); + firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer); + if (firstPendingPullInto.readerType === "none") { + ReadableByteStreamControllerEnqueueDetachedPullIntoToQueue(controller, firstPendingPullInto); + } + } + if (ReadableStreamHasDefaultReader(stream)) { + ReadableByteStreamControllerProcessReadRequestsUsingQueue(controller); + if (ReadableStreamGetNumReadRequests(stream) === 0) { + ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); + } else { + if (controller._pendingPullIntos.length > 0) { + ReadableByteStreamControllerShiftPendingPullInto(controller); + } + const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength); + ReadableStreamFulfillReadRequest(stream, transferredView, false); + } + } else if (ReadableStreamHasBYOBReader(stream)) { + ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); + ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); + } else { + ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength); + } + ReadableByteStreamControllerCallPullIfNeeded(controller); } - function defaultWriterClosedPromiseInitialize(writer) { - writer._closedPromise = newPromise((resolve, reject) => { - writer._closedPromise_resolve = resolve; - writer._closedPromise_reject = reject; - writer._closedPromiseState = "pending"; - }); + function ReadableByteStreamControllerError(controller, e2) { + const stream = controller._controlledReadableByteStream; + if (stream._state !== "readable") { + return; + } + ReadableByteStreamControllerClearPendingPullIntos(controller); + ResetQueue(controller); + ReadableByteStreamControllerClearAlgorithms(controller); + ReadableStreamError(stream, e2); } - function defaultWriterClosedPromiseInitializeAsRejected(writer, 
reason) { - defaultWriterClosedPromiseInitialize(writer); - defaultWriterClosedPromiseReject(writer, reason); + function ReadableByteStreamControllerFillReadRequestFromQueue(controller, readRequest) { + const entry = controller._queue.shift(); + controller._queueTotalSize -= entry.byteLength; + ReadableByteStreamControllerHandleQueueDrain(controller); + const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength); + readRequest._chunkSteps(view); } - function defaultWriterClosedPromiseInitializeAsResolved(writer) { - defaultWriterClosedPromiseInitialize(writer); - defaultWriterClosedPromiseResolve(writer); + function ReadableByteStreamControllerGetBYOBRequest(controller) { + if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) { + const firstDescriptor = controller._pendingPullIntos.peek(); + const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled); + const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype); + SetUpReadableStreamBYOBRequest(byobRequest, controller, view); + controller._byobRequest = byobRequest; + } + return controller._byobRequest; } - function defaultWriterClosedPromiseReject(writer, reason) { - if (writer._closedPromise_reject === void 0) { - return; + function ReadableByteStreamControllerGetDesiredSize(controller) { + const state = controller._controlledReadableByteStream._state; + if (state === "errored") { + return null; } - setPromiseIsHandledToTrue(writer._closedPromise); - writer._closedPromise_reject(reason); - writer._closedPromise_resolve = void 0; - writer._closedPromise_reject = void 0; - writer._closedPromiseState = "rejected"; + if (state === "closed") { + return 0; + } + return controller._strategyHWM - controller._queueTotalSize; } - function defaultWriterClosedPromiseResetToRejected(writer, reason) { - defaultWriterClosedPromiseInitializeAsRejected(writer, reason); + function ReadableByteStreamControllerRespond(controller, bytesWritten) { + const firstDescriptor = controller._pendingPullIntos.peek(); + const state = controller._controlledReadableByteStream._state; + if (state === "closed") { + if (bytesWritten !== 0) { + throw new TypeError("bytesWritten must be 0 when calling respond() on a closed stream"); + } + } else { + if (bytesWritten === 0) { + throw new TypeError("bytesWritten must be greater than 0 when calling respond() on a readable stream"); + } + if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) { + throw new RangeError("bytesWritten out of range"); + } + } + firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer); + ReadableByteStreamControllerRespondInternal(controller, bytesWritten); } - function defaultWriterClosedPromiseResolve(writer) { - if (writer._closedPromise_resolve === void 0) { - return; + function ReadableByteStreamControllerRespondWithNewView(controller, view) { + const firstDescriptor = controller._pendingPullIntos.peek(); + const state = controller._controlledReadableByteStream._state; + if (state === "closed") { + if (view.byteLength !== 0) { + throw new TypeError("The view's length must be 0 when calling respondWithNewView() on a closed stream"); + } + } else { + if (view.byteLength === 0) { + throw new TypeError("The view's length must be greater than 0 when calling respondWithNewView() on a readable stream"); + } } - writer._closedPromise_resolve(void 0); - writer._closedPromise_resolve = void 0; - 
writer._closedPromise_reject = void 0; - writer._closedPromiseState = "resolved"; + if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) { + throw new RangeError("The region specified by view does not match byobRequest"); + } + if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) { + throw new RangeError("The buffer of view has different capacity than byobRequest"); + } + if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) { + throw new RangeError("The region specified by view is larger than byobRequest"); + } + const viewByteLength = view.byteLength; + firstDescriptor.buffer = TransferArrayBuffer(view.buffer); + ReadableByteStreamControllerRespondInternal(controller, viewByteLength); } - function defaultWriterReadyPromiseInitialize(writer) { - writer._readyPromise = newPromise((resolve, reject) => { - writer._readyPromise_resolve = resolve; - writer._readyPromise_reject = reject; + function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) { + controller._controlledReadableByteStream = stream; + controller._pullAgain = false; + controller._pulling = false; + controller._byobRequest = null; + controller._queue = controller._queueTotalSize = void 0; + ResetQueue(controller); + controller._closeRequested = false; + controller._started = false; + controller._strategyHWM = highWaterMark; + controller._pullAlgorithm = pullAlgorithm; + controller._cancelAlgorithm = cancelAlgorithm; + controller._autoAllocateChunkSize = autoAllocateChunkSize; + controller._pendingPullIntos = new SimpleQueue(); + stream._readableStreamController = controller; + const startResult = startAlgorithm(); + uponPromise(promiseResolvedWith(startResult), () => { + controller._started = true; + ReadableByteStreamControllerCallPullIfNeeded(controller); + return null; + }, (r2) => { + ReadableByteStreamControllerError(controller, r2); + return null; }); - writer._readyPromiseState = "pending"; } - function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) { - defaultWriterReadyPromiseInitialize(writer); - defaultWriterReadyPromiseReject(writer, reason); + function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) { + const controller = Object.create(ReadableByteStreamController.prototype); + let startAlgorithm; + let pullAlgorithm; + let cancelAlgorithm; + if (underlyingByteSource.start !== void 0) { + startAlgorithm = () => underlyingByteSource.start(controller); + } else { + startAlgorithm = () => void 0; + } + if (underlyingByteSource.pull !== void 0) { + pullAlgorithm = () => underlyingByteSource.pull(controller); + } else { + pullAlgorithm = () => promiseResolvedWith(void 0); + } + if (underlyingByteSource.cancel !== void 0) { + cancelAlgorithm = (reason) => underlyingByteSource.cancel(reason); + } else { + cancelAlgorithm = () => promiseResolvedWith(void 0); + } + const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize; + if (autoAllocateChunkSize === 0) { + throw new TypeError("autoAllocateChunkSize must be greater than 0"); + } + SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize); } - function defaultWriterReadyPromiseInitializeAsResolved(writer) { - defaultWriterReadyPromiseInitialize(writer); - defaultWriterReadyPromiseResolve(writer); + function SetUpReadableStreamBYOBRequest(request3, 
controller, view) { + request3._associatedReadableByteStreamController = controller; + request3._view = view; } - function defaultWriterReadyPromiseReject(writer, reason) { - if (writer._readyPromise_reject === void 0) { - return; - } - setPromiseIsHandledToTrue(writer._readyPromise); - writer._readyPromise_reject(reason); - writer._readyPromise_resolve = void 0; - writer._readyPromise_reject = void 0; - writer._readyPromiseState = "rejected"; + function byobRequestBrandCheckException(name) { + return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`); } - function defaultWriterReadyPromiseReset(writer) { - defaultWriterReadyPromiseInitialize(writer); + function byteStreamControllerBrandCheckException(name) { + return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`); } - function defaultWriterReadyPromiseResetToRejected(writer, reason) { - defaultWriterReadyPromiseInitializeAsRejected(writer, reason); + function convertReaderOptions(options, context2) { + assertDictionary(options, context2); + const mode = options === null || options === void 0 ? void 0 : options.mode; + return { + mode: mode === void 0 ? void 0 : convertReadableStreamReaderMode(mode, `${context2} has member 'mode' that`) + }; } - function defaultWriterReadyPromiseResolve(writer) { - if (writer._readyPromise_resolve === void 0) { - return; + function convertReadableStreamReaderMode(mode, context2) { + mode = `${mode}`; + if (mode !== "byob") { + throw new TypeError(`${context2} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`); } - writer._readyPromise_resolve(void 0); - writer._readyPromise_resolve = void 0; - writer._readyPromise_reject = void 0; - writer._readyPromiseState = "fulfilled"; + return mode; } - function getGlobals() { - if (typeof globalThis !== "undefined") { - return globalThis; - } else if (typeof self !== "undefined") { - return self; - } else if (typeof global !== "undefined") { - return global; - } - return void 0; + function convertByobReadOptions(options, context2) { + var _a3; + assertDictionary(options, context2); + const min = (_a3 = options === null || options === void 0 ? void 0 : options.min) !== null && _a3 !== void 0 ? 
_a3 : 1; + return { + min: convertUnsignedLongLongWithEnforceRange(min, `${context2} has member 'min' that`) + }; } - const globals = getGlobals(); - function isDOMExceptionConstructor(ctor) { - if (!(typeof ctor === "function" || typeof ctor === "object")) { - return false; + function AcquireReadableStreamBYOBReader(stream) { + return new ReadableStreamBYOBReader(stream); + } + function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) { + stream._reader._readIntoRequests.push(readIntoRequest); + } + function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) { + const reader = stream._reader; + const readIntoRequest = reader._readIntoRequests.shift(); + if (done) { + readIntoRequest._closeSteps(chunk); + } else { + readIntoRequest._chunkSteps(chunk); } - if (ctor.name !== "DOMException") { + } + function ReadableStreamGetNumReadIntoRequests(stream) { + return stream._reader._readIntoRequests.length; + } + function ReadableStreamHasBYOBReader(stream) { + const reader = stream._reader; + if (reader === void 0) { return false; } - try { - new ctor(); - return true; - } catch (_a3) { + if (!IsReadableStreamBYOBReader(reader)) { return false; } + return true; } - function getFromGlobal() { - const ctor = globals === null || globals === void 0 ? void 0 : globals.DOMException; - return isDOMExceptionConstructor(ctor) ? ctor : void 0; - } - function createPolyfill() { - const ctor = function DOMException4(message, name) { - this.message = message || ""; - this.name = name || "Error"; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + class ReadableStreamBYOBReader { + constructor(stream) { + assertRequiredArgument(stream, 1, "ReadableStreamBYOBReader"); + assertReadableStream(stream, "First parameter"); + if (IsReadableStreamLocked(stream)) { + throw new TypeError("This stream has already been locked for exclusive reading by another reader"); } - }; - setFunctionName(ctor, "DOMException"); - ctor.prototype = Object.create(Error.prototype); - Object.defineProperty(ctor.prototype, "constructor", { value: ctor, writable: true, configurable: true }); - return ctor; - } - const DOMException3 = getFromGlobal() || createPolyfill(); - function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) { - const reader = AcquireReadableStreamDefaultReader(source); - const writer = AcquireWritableStreamDefaultWriter(dest); - source._disturbed = true; - let shuttingDown = false; - let currentWrite = promiseResolvedWith(void 0); - return newPromise((resolve, reject) => { - let abortAlgorithm; - if (signal !== void 0) { - abortAlgorithm = () => { - const error = signal.reason !== void 0 ? 
signal.reason : new DOMException3("Aborted", "AbortError"); - const actions = []; - if (!preventAbort) { - actions.push(() => { - if (dest._state === "writable") { - return WritableStreamAbort(dest, error); - } - return promiseResolvedWith(void 0); - }); - } - if (!preventCancel) { - actions.push(() => { - if (source._state === "readable") { - return ReadableStreamCancel(source, error); - } - return promiseResolvedWith(void 0); - }); - } - shutdownWithAction(() => Promise.all(actions.map((action) => action())), true, error); - }; - if (signal.aborted) { - abortAlgorithm(); - return; - } - signal.addEventListener("abort", abortAlgorithm); + if (!IsReadableByteStreamController(stream._readableStreamController)) { + throw new TypeError("Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte source"); } - function pipeLoop() { - return newPromise((resolveLoop, rejectLoop) => { - function next(done) { - if (done) { - resolveLoop(); - } else { - PerformPromiseThen(pipeStep(), next, rejectLoop); - } - } - next(false); - }); + ReadableStreamReaderGenericInitialize(this, stream); + this._readIntoRequests = new SimpleQueue(); + } + /** + * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or + * the reader's lock is released before the stream finishes closing. + */ + get closed() { + if (!IsReadableStreamBYOBReader(this)) { + return promiseRejectedWith(byobReaderBrandCheckException("closed")); } - function pipeStep() { - if (shuttingDown) { - return promiseResolvedWith(true); - } - return PerformPromiseThen(writer._readyPromise, () => { - return newPromise((resolveRead, rejectRead) => { - ReadableStreamDefaultReaderRead(reader, { - _chunkSteps: (chunk) => { - currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), void 0, noop3); - resolveRead(false); - }, - _closeSteps: () => resolveRead(true), - _errorSteps: rejectRead - }); - }); - }); + return this._closedPromise; + } + /** + * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}. + */ + cancel(reason = void 0) { + if (!IsReadableStreamBYOBReader(this)) { + return promiseRejectedWith(byobReaderBrandCheckException("cancel")); } - isOrBecomesErrored(source, reader._closedPromise, (storedError) => { - if (!preventAbort) { - shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError); - } else { - shutdown(true, storedError); - } - return null; - }); - isOrBecomesErrored(dest, writer._closedPromise, (storedError) => { - if (!preventCancel) { - shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError); - } else { - shutdown(true, storedError); - } - return null; - }); - isOrBecomesClosed(source, reader._closedPromise, () => { - if (!preventClose) { - shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer)); - } else { - shutdown(); - } - return null; - }); - if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === "closed") { - const destClosed = new TypeError("the destination writable stream closed before all data could be piped to it"); - if (!preventCancel) { - shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed); - } else { - shutdown(true, destClosed); - } - } - setPromiseIsHandledToTrue(pipeLoop()); - function waitForWritesToFinish() { - const oldCurrentWrite = currentWrite; - return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? 
waitForWritesToFinish() : void 0); - } - function isOrBecomesErrored(stream, promise, action) { - if (stream._state === "errored") { - action(stream._storedError); - } else { - uponRejection(promise, action); - } + if (this._ownerReadableStream === void 0) { + return promiseRejectedWith(readerLockException("cancel")); } - function isOrBecomesClosed(stream, promise, action) { - if (stream._state === "closed") { - action(); - } else { - uponFulfillment(promise, action); - } + return ReadableStreamReaderGenericCancel(this, reason); + } + read(view, rawOptions = {}) { + if (!IsReadableStreamBYOBReader(this)) { + return promiseRejectedWith(byobReaderBrandCheckException("read")); } - function shutdownWithAction(action, originalIsError, originalError) { - if (shuttingDown) { - return; - } - shuttingDown = true; - if (dest._state === "writable" && !WritableStreamCloseQueuedOrInFlight(dest)) { - uponFulfillment(waitForWritesToFinish(), doTheRest); - } else { - doTheRest(); - } - function doTheRest() { - uponPromise(action(), () => finalize(originalIsError, originalError), (newError) => finalize(true, newError)); - return null; - } + if (!ArrayBuffer.isView(view)) { + return promiseRejectedWith(new TypeError("view must be an array buffer view")); } - function shutdown(isError, error) { - if (shuttingDown) { - return; - } - shuttingDown = true; - if (dest._state === "writable" && !WritableStreamCloseQueuedOrInFlight(dest)) { - uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error)); - } else { - finalize(isError, error); - } + if (view.byteLength === 0) { + return promiseRejectedWith(new TypeError("view must have non-zero byteLength")); } - function finalize(isError, error) { - WritableStreamDefaultWriterRelease(writer); - ReadableStreamReaderGenericRelease(reader); - if (signal !== void 0) { - signal.removeEventListener("abort", abortAlgorithm); - } - if (isError) { - reject(error); - } else { - resolve(void 0); - } - return null; + if (view.buffer.byteLength === 0) { + return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`)); } - }); - } - class ReadableStreamDefaultController { - constructor() { - throw new TypeError("Illegal constructor"); - } - /** - * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is - * over-full. An underlying source ought to use this information to determine when and how to apply backpressure. - */ - get desiredSize() { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1("desiredSize"); + if (IsDetachedBuffer(view.buffer)) { + return promiseRejectedWith(new TypeError("view's buffer has been detached")); } - return ReadableStreamDefaultControllerGetDesiredSize(this); - } - /** - * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from - * the stream, but once those are read, the stream will become closed. 
- */ - close() { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1("close"); + let options; + try { + options = convertByobReadOptions(rawOptions, "options"); + } catch (e2) { + return promiseRejectedWith(e2); } - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { - throw new TypeError("The stream is not in a state that permits close"); + const min = options.min; + if (min === 0) { + return promiseRejectedWith(new TypeError("options.min must be greater than 0")); } - ReadableStreamDefaultControllerClose(this); - } - enqueue(chunk = void 0) { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1("enqueue"); + if (!isDataView(view)) { + if (min > view.length) { + return promiseRejectedWith(new RangeError("options.min must be less than or equal to view's length")); + } + } else if (min > view.byteLength) { + return promiseRejectedWith(new RangeError("options.min must be less than or equal to view's byteLength")); } - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { - throw new TypeError("The stream is not in a state that permits enqueue"); + if (this._ownerReadableStream === void 0) { + return promiseRejectedWith(readerLockException("read from")); } - return ReadableStreamDefaultControllerEnqueue(this, chunk); + let resolvePromise; + let rejectPromise; + const promise = newPromise((resolve, reject) => { + resolvePromise = resolve; + rejectPromise = reject; + }); + const readIntoRequest = { + _chunkSteps: (chunk) => resolvePromise({ value: chunk, done: false }), + _closeSteps: (chunk) => resolvePromise({ value: chunk, done: true }), + _errorSteps: (e2) => rejectPromise(e2) + }; + ReadableStreamBYOBReaderRead(this, view, min, readIntoRequest); + return promise; } /** - * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. + * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active. + * If the associated stream is errored when the lock is released, the reader will appear errored in the same way + * from now on; otherwise, the reader will appear closed. + * + * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by + * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to + * do so will throw a `TypeError` and leave the reader locked to the stream. 
*/ - error(e2 = void 0) { - if (!IsReadableStreamDefaultController(this)) { - throw defaultControllerBrandCheckException$1("error"); + releaseLock() { + if (!IsReadableStreamBYOBReader(this)) { + throw byobReaderBrandCheckException("releaseLock"); } - ReadableStreamDefaultControllerError(this, e2); - } - /** @internal */ - [CancelSteps](reason) { - ResetQueue(this); - const result = this._cancelAlgorithm(reason); - ReadableStreamDefaultControllerClearAlgorithms(this); - return result; - } - /** @internal */ - [PullSteps](readRequest) { - const stream = this._controlledReadableStream; - if (this._queue.length > 0) { - const chunk = DequeueValue(this); - if (this._closeRequested && this._queue.length === 0) { - ReadableStreamDefaultControllerClearAlgorithms(this); - ReadableStreamClose(stream); - } else { - ReadableStreamDefaultControllerCallPullIfNeeded(this); - } - readRequest._chunkSteps(chunk); - } else { - ReadableStreamAddReadRequest(stream, readRequest); - ReadableStreamDefaultControllerCallPullIfNeeded(this); + if (this._ownerReadableStream === void 0) { + return; } - } - /** @internal */ - [ReleaseSteps]() { + ReadableStreamBYOBReaderRelease(this); } } - Object.defineProperties(ReadableStreamDefaultController.prototype, { - close: { enumerable: true }, - enqueue: { enumerable: true }, - error: { enumerable: true }, - desiredSize: { enumerable: true } + Object.defineProperties(ReadableStreamBYOBReader.prototype, { + cancel: { enumerable: true }, + read: { enumerable: true }, + releaseLock: { enumerable: true }, + closed: { enumerable: true } }); - setFunctionName(ReadableStreamDefaultController.prototype.close, "close"); - setFunctionName(ReadableStreamDefaultController.prototype.enqueue, "enqueue"); - setFunctionName(ReadableStreamDefaultController.prototype.error, "error"); + setFunctionName(ReadableStreamBYOBReader.prototype.cancel, "cancel"); + setFunctionName(ReadableStreamBYOBReader.prototype.read, "read"); + setFunctionName(ReadableStreamBYOBReader.prototype.releaseLock, "releaseLock"); if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableStreamDefaultController.prototype, Symbol.toStringTag, { - value: "ReadableStreamDefaultController", + Object.defineProperty(ReadableStreamBYOBReader.prototype, Symbol.toStringTag, { + value: "ReadableStreamBYOBReader", configurable: true }); } - function IsReadableStreamDefaultController(x2) { + function IsReadableStreamBYOBReader(x2) { if (!typeIsObject(x2)) { return false; } - if (!Object.prototype.hasOwnProperty.call(x2, "_controlledReadableStream")) { + if (!Object.prototype.hasOwnProperty.call(x2, "_readIntoRequests")) { return false; } - return x2 instanceof ReadableStreamDefaultController; + return x2 instanceof ReadableStreamBYOBReader; } - function ReadableStreamDefaultControllerCallPullIfNeeded(controller) { - const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller); - if (!shouldPull) { - return; - } - if (controller._pulling) { - controller._pullAgain = true; - return; + function ReadableStreamBYOBReaderRead(reader, view, min, readIntoRequest) { + const stream = reader._ownerReadableStream; + stream._disturbed = true; + if (stream._state === "errored") { + readIntoRequest._errorSteps(stream._storedError); + } else { + ReadableByteStreamControllerPullInto(stream._readableStreamController, view, min, readIntoRequest); } - controller._pulling = true; - const pullPromise = controller._pullAlgorithm(); - uponPromise(pullPromise, () => { - controller._pulling = false; - if 
(controller._pullAgain) { - controller._pullAgain = false; - ReadableStreamDefaultControllerCallPullIfNeeded(controller); - } - return null; - }, (e2) => { - ReadableStreamDefaultControllerError(controller, e2); - return null; - }); } - function ReadableStreamDefaultControllerShouldCallPull(controller) { - const stream = controller._controlledReadableStream; - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return false; - } - if (!controller._started) { - return false; - } - if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - return true; - } - const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller); - if (desiredSize > 0) { - return true; - } - return false; + function ReadableStreamBYOBReaderRelease(reader) { + ReadableStreamReaderGenericRelease(reader); + const e2 = new TypeError("Reader was released"); + ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2); } - function ReadableStreamDefaultControllerClearAlgorithms(controller) { - controller._pullAlgorithm = void 0; - controller._cancelAlgorithm = void 0; - controller._strategySizeAlgorithm = void 0; + function ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2) { + const readIntoRequests = reader._readIntoRequests; + reader._readIntoRequests = new SimpleQueue(); + readIntoRequests.forEach((readIntoRequest) => { + readIntoRequest._errorSteps(e2); + }); } - function ReadableStreamDefaultControllerClose(controller) { - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return; - } - const stream = controller._controlledReadableStream; - controller._closeRequested = true; - if (controller._queue.length === 0) { - ReadableStreamDefaultControllerClearAlgorithms(controller); - ReadableStreamClose(stream); - } + function byobReaderBrandCheckException(name) { + return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`); } - function ReadableStreamDefaultControllerEnqueue(controller, chunk) { - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { - return; + function ExtractHighWaterMark(strategy, defaultHWM) { + const { highWaterMark } = strategy; + if (highWaterMark === void 0) { + return defaultHWM; } - const stream = controller._controlledReadableStream; - if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { - ReadableStreamFulfillReadRequest(stream, chunk, false); - } else { - let chunkSize; - try { - chunkSize = controller._strategySizeAlgorithm(chunk); - } catch (chunkSizeE) { - ReadableStreamDefaultControllerError(controller, chunkSizeE); - throw chunkSizeE; - } - try { - EnqueueValueWithSize(controller, chunk, chunkSize); - } catch (enqueueE) { - ReadableStreamDefaultControllerError(controller, enqueueE); - throw enqueueE; - } + if (NumberIsNaN(highWaterMark) || highWaterMark < 0) { + throw new RangeError("Invalid highWaterMark"); } - ReadableStreamDefaultControllerCallPullIfNeeded(controller); + return highWaterMark; } - function ReadableStreamDefaultControllerError(controller, e2) { - const stream = controller._controlledReadableStream; - if (stream._state !== "readable") { - return; + function ExtractSizeAlgorithm(strategy) { + const { size } = strategy; + if (!size) { + return () => 1; } - ResetQueue(controller); - ReadableStreamDefaultControllerClearAlgorithms(controller); - ReadableStreamError(stream, e2); + return size; } - function ReadableStreamDefaultControllerGetDesiredSize(controller) { - const state = 
controller._controlledReadableStream._state; - if (state === "errored") { - return null; - } - if (state === "closed") { - return 0; - } - return controller._strategyHWM - controller._queueTotalSize; + function convertQueuingStrategy(init, context2) { + assertDictionary(init, context2); + const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; + const size = init === null || init === void 0 ? void 0 : init.size; + return { + highWaterMark: highWaterMark === void 0 ? void 0 : convertUnrestrictedDouble(highWaterMark), + size: size === void 0 ? void 0 : convertQueuingStrategySize(size, `${context2} has member 'size' that`) + }; } - function ReadableStreamDefaultControllerHasBackpressure(controller) { - if (ReadableStreamDefaultControllerShouldCallPull(controller)) { - return false; - } - return true; + function convertQueuingStrategySize(fn, context2) { + assertFunction(fn, context2); + return (chunk) => convertUnrestrictedDouble(fn(chunk)); } - function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) { - const state = controller._controlledReadableStream._state; - if (!controller._closeRequested && state === "readable") { - return true; - } - return false; + function convertUnderlyingSink(original, context2) { + assertDictionary(original, context2); + const abort = original === null || original === void 0 ? void 0 : original.abort; + const close = original === null || original === void 0 ? void 0 : original.close; + const start = original === null || original === void 0 ? void 0 : original.start; + const type = original === null || original === void 0 ? void 0 : original.type; + const write = original === null || original === void 0 ? void 0 : original.write; + return { + abort: abort === void 0 ? void 0 : convertUnderlyingSinkAbortCallback(abort, original, `${context2} has member 'abort' that`), + close: close === void 0 ? void 0 : convertUnderlyingSinkCloseCallback(close, original, `${context2} has member 'close' that`), + start: start === void 0 ? void 0 : convertUnderlyingSinkStartCallback(start, original, `${context2} has member 'start' that`), + write: write === void 0 ? 
void 0 : convertUnderlyingSinkWriteCallback(write, original, `${context2} has member 'write' that`), + type + }; } - function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) { - controller._controlledReadableStream = stream; - controller._queue = void 0; - controller._queueTotalSize = void 0; - ResetQueue(controller); - controller._started = false; - controller._closeRequested = false; - controller._pullAgain = false; - controller._pulling = false; - controller._strategySizeAlgorithm = sizeAlgorithm; - controller._strategyHWM = highWaterMark; - controller._pullAlgorithm = pullAlgorithm; - controller._cancelAlgorithm = cancelAlgorithm; - stream._readableStreamController = controller; - const startResult = startAlgorithm(); - uponPromise(promiseResolvedWith(startResult), () => { - controller._started = true; - ReadableStreamDefaultControllerCallPullIfNeeded(controller); - return null; - }, (r2) => { - ReadableStreamDefaultControllerError(controller, r2); - return null; - }); + function convertUnderlyingSinkAbortCallback(fn, original, context2) { + assertFunction(fn, context2); + return (reason) => promiseCall(fn, original, [reason]); } - function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) { - const controller = Object.create(ReadableStreamDefaultController.prototype); - let startAlgorithm; - let pullAlgorithm; - let cancelAlgorithm; - if (underlyingSource.start !== void 0) { - startAlgorithm = () => underlyingSource.start(controller); - } else { - startAlgorithm = () => void 0; + function convertUnderlyingSinkCloseCallback(fn, original, context2) { + assertFunction(fn, context2); + return () => promiseCall(fn, original, []); + } + function convertUnderlyingSinkStartCallback(fn, original, context2) { + assertFunction(fn, context2); + return (controller) => reflectCall(fn, original, [controller]); + } + function convertUnderlyingSinkWriteCallback(fn, original, context2) { + assertFunction(fn, context2); + return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); + } + function assertWritableStream(x2, context2) { + if (!IsWritableStream(x2)) { + throw new TypeError(`${context2} is not a WritableStream.`); } - if (underlyingSource.pull !== void 0) { - pullAlgorithm = () => underlyingSource.pull(controller); - } else { - pullAlgorithm = () => promiseResolvedWith(void 0); + } + function isAbortSignal2(value) { + if (typeof value !== "object" || value === null) { + return false; } - if (underlyingSource.cancel !== void 0) { - cancelAlgorithm = (reason) => underlyingSource.cancel(reason); - } else { - cancelAlgorithm = () => promiseResolvedWith(void 0); + try { + return typeof value.aborted === "boolean"; + } catch (_a3) { + return false; } - SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); - } - function defaultControllerBrandCheckException$1(name) { - return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`); } - function ReadableStreamTee(stream, cloneForBranch2) { - if (IsReadableByteStreamController(stream._readableStreamController)) { - return ReadableByteStreamTee(stream); + const supportsAbortController = typeof AbortController === "function"; + function createAbortController() { + if (supportsAbortController) { + return new AbortController(); } - return 
ReadableStreamDefaultTee(stream); + return void 0; } - function ReadableStreamDefaultTee(stream, cloneForBranch2) { - const reader = AcquireReadableStreamDefaultReader(stream); - let reading = false; - let readAgain = false; - let canceled1 = false; - let canceled2 = false; - let reason1; - let reason2; - let branch1; - let branch2; - let resolveCancelPromise; - const cancelPromise = newPromise((resolve) => { - resolveCancelPromise = resolve; - }); - function pullAlgorithm() { - if (reading) { - readAgain = true; - return promiseResolvedWith(void 0); + class WritableStream { + constructor(rawUnderlyingSink = {}, rawStrategy = {}) { + if (rawUnderlyingSink === void 0) { + rawUnderlyingSink = null; + } else { + assertObject(rawUnderlyingSink, "First parameter"); } - reading = true; - const readRequest = { - _chunkSteps: (chunk) => { - _queueMicrotask(() => { - readAgain = false; - const chunk1 = chunk; - const chunk2 = chunk; - if (!canceled1) { - ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1); - } - if (!canceled2) { - ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2); - } - reading = false; - if (readAgain) { - pullAlgorithm(); - } - }); - }, - _closeSteps: () => { - reading = false; - if (!canceled1) { - ReadableStreamDefaultControllerClose(branch1._readableStreamController); - } - if (!canceled2) { - ReadableStreamDefaultControllerClose(branch2._readableStreamController); - } - if (!canceled1 || !canceled2) { - resolveCancelPromise(void 0); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); - return promiseResolvedWith(void 0); + const strategy = convertQueuingStrategy(rawStrategy, "Second parameter"); + const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, "First parameter"); + InitializeWritableStream(this); + const type = underlyingSink.type; + if (type !== void 0) { + throw new RangeError("Invalid type is specified"); + } + const sizeAlgorithm = ExtractSizeAlgorithm(strategy); + const highWaterMark = ExtractHighWaterMark(strategy, 1); + SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm); } - function cancel1Algorithm(reason) { - canceled1 = true; - reason1 = reason; - if (canceled2) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); + /** + * Returns whether or not the writable stream is locked to a writer. + */ + get locked() { + if (!IsWritableStream(this)) { + throw streamBrandCheckException$2("locked"); } - return cancelPromise; + return IsWritableStreamLocked(this); } - function cancel2Algorithm(reason) { - canceled2 = true; - reason2 = reason; - if (canceled1) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); + /** + * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be + * immediately moved to an errored state, with any queued-up writes discarded. This will also execute any abort + * mechanism of the underlying sink. + * + * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled + * that there was an error doing so. 
Additionally, it will reject with a `TypeError` (without attempting to cancel + * the stream) if the stream is currently locked. + */ + abort(reason = void 0) { + if (!IsWritableStream(this)) { + return promiseRejectedWith(streamBrandCheckException$2("abort")); } - return cancelPromise; + if (IsWritableStreamLocked(this)) { + return promiseRejectedWith(new TypeError("Cannot abort a stream that already has a writer")); + } + return WritableStreamAbort(this, reason); } - function startAlgorithm() { + /** + * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its + * close behavior. During this time any further attempts to write will fail (without erroring the stream). + * + * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream + * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with + * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked. + */ + close() { + if (!IsWritableStream(this)) { + return promiseRejectedWith(streamBrandCheckException$2("close")); + } + if (IsWritableStreamLocked(this)) { + return promiseRejectedWith(new TypeError("Cannot close a stream that already has a writer")); + } + if (WritableStreamCloseQueuedOrInFlight(this)) { + return promiseRejectedWith(new TypeError("Cannot close an already-closing stream")); + } + return WritableStreamClose(this); } - branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm); - branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm); - uponRejection(reader._closedPromise, (r2) => { - ReadableStreamDefaultControllerError(branch1._readableStreamController, r2); - ReadableStreamDefaultControllerError(branch2._readableStreamController, r2); - if (!canceled1 || !canceled2) { - resolveCancelPromise(void 0); + /** + * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream + * is locked, no other writer can be acquired until this one is released. + * + * This functionality is especially useful for creating abstractions that desire the ability to write to a stream + * without interruption or interleaving. By getting a writer for the stream, you can ensure nobody else can write at + * the same time, which would cause the resulting written data to be unpredictable and probably useless. 
+ */ + getWriter() { + if (!IsWritableStream(this)) { + throw streamBrandCheckException$2("getWriter"); } - return null; - }); - return [branch1, branch2]; + return AcquireWritableStreamDefaultWriter(this); + } } - function ReadableByteStreamTee(stream) { - let reader = AcquireReadableStreamDefaultReader(stream); - let reading = false; - let readAgainForBranch1 = false; - let readAgainForBranch2 = false; - let canceled1 = false; - let canceled2 = false; - let reason1; - let reason2; - let branch1; - let branch2; - let resolveCancelPromise; - const cancelPromise = newPromise((resolve) => { - resolveCancelPromise = resolve; + Object.defineProperties(WritableStream.prototype, { + abort: { enumerable: true }, + close: { enumerable: true }, + getWriter: { enumerable: true }, + locked: { enumerable: true } + }); + setFunctionName(WritableStream.prototype.abort, "abort"); + setFunctionName(WritableStream.prototype.close, "close"); + setFunctionName(WritableStream.prototype.getWriter, "getWriter"); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(WritableStream.prototype, Symbol.toStringTag, { + value: "WritableStream", + configurable: true }); - function forwardReaderError(thisReader) { - uponRejection(thisReader._closedPromise, (r2) => { - if (thisReader !== reader) { - return null; - } - ReadableByteStreamControllerError(branch1._readableStreamController, r2); - ReadableByteStreamControllerError(branch2._readableStreamController, r2); - if (!canceled1 || !canceled2) { - resolveCancelPromise(void 0); - } - return null; - }); + } + function AcquireWritableStreamDefaultWriter(stream) { + return new WritableStreamDefaultWriter(stream); + } + function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) { + const stream = Object.create(WritableStream.prototype); + InitializeWritableStream(stream); + const controller = Object.create(WritableStreamDefaultController.prototype); + SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); + return stream; + } + function InitializeWritableStream(stream) { + stream._state = "writable"; + stream._storedError = void 0; + stream._writer = void 0; + stream._writableStreamController = void 0; + stream._writeRequests = new SimpleQueue(); + stream._inFlightWriteRequest = void 0; + stream._closeRequest = void 0; + stream._inFlightCloseRequest = void 0; + stream._pendingAbortRequest = void 0; + stream._backpressure = false; + } + function IsWritableStream(x2) { + if (!typeIsObject(x2)) { + return false; } - function pullWithDefaultReader() { - if (IsReadableStreamBYOBReader(reader)) { - ReadableStreamReaderGenericRelease(reader); - reader = AcquireReadableStreamDefaultReader(stream); - forwardReaderError(reader); - } - const readRequest = { - _chunkSteps: (chunk) => { - _queueMicrotask(() => { - readAgainForBranch1 = false; - readAgainForBranch2 = false; - const chunk1 = chunk; - let chunk2 = chunk; - if (!canceled1 && !canceled2) { - try { - chunk2 = CloneAsUint8Array(chunk); - } catch (cloneE) { - ReadableByteStreamControllerError(branch1._readableStreamController, cloneE); - ReadableByteStreamControllerError(branch2._readableStreamController, cloneE); - resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); - return; - } - } - if (!canceled1) { - ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1); - } - if (!canceled2) { - 
ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2); - } - reading = false; - if (readAgainForBranch1) { - pull1Algorithm(); - } else if (readAgainForBranch2) { - pull2Algorithm(); - } - }); - }, - _closeSteps: () => { - reading = false; - if (!canceled1) { - ReadableByteStreamControllerClose(branch1._readableStreamController); - } - if (!canceled2) { - ReadableByteStreamControllerClose(branch2._readableStreamController); - } - if (branch1._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(branch1._readableStreamController, 0); - } - if (branch2._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(branch2._readableStreamController, 0); - } - if (!canceled1 || !canceled2) { - resolveCancelPromise(void 0); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamDefaultReaderRead(reader, readRequest); + if (!Object.prototype.hasOwnProperty.call(x2, "_writableStreamController")) { + return false; } - function pullWithBYOBReader(view, forBranch2) { - if (IsReadableStreamDefaultReader(reader)) { - ReadableStreamReaderGenericRelease(reader); - reader = AcquireReadableStreamBYOBReader(stream); - forwardReaderError(reader); - } - const byobBranch = forBranch2 ? branch2 : branch1; - const otherBranch = forBranch2 ? branch1 : branch2; - const readIntoRequest = { - _chunkSteps: (chunk) => { - _queueMicrotask(() => { - readAgainForBranch1 = false; - readAgainForBranch2 = false; - const byobCanceled = forBranch2 ? canceled2 : canceled1; - const otherCanceled = forBranch2 ? canceled1 : canceled2; - if (!otherCanceled) { - let clonedChunk; - try { - clonedChunk = CloneAsUint8Array(chunk); - } catch (cloneE) { - ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE); - ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE); - resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); - return; - } - if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk); - } else if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - reading = false; - if (readAgainForBranch1) { - pull1Algorithm(); - } else if (readAgainForBranch2) { - pull2Algorithm(); - } - }); - }, - _closeSteps: (chunk) => { - reading = false; - const byobCanceled = forBranch2 ? canceled2 : canceled1; - const otherCanceled = forBranch2 ? 
canceled1 : canceled2; - if (!byobCanceled) { - ReadableByteStreamControllerClose(byobBranch._readableStreamController); - } - if (!otherCanceled) { - ReadableByteStreamControllerClose(otherBranch._readableStreamController); - } - if (chunk !== void 0) { - if (!byobCanceled) { - ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); - } - if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) { - ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0); - } - } - if (!byobCanceled || !otherCanceled) { - resolveCancelPromise(void 0); - } - }, - _errorSteps: () => { - reading = false; - } - }; - ReadableStreamBYOBReaderRead(reader, view, 1, readIntoRequest); + return x2 instanceof WritableStream; + } + function IsWritableStreamLocked(stream) { + if (stream._writer === void 0) { + return false; } - function pull1Algorithm() { - if (reading) { - readAgainForBranch1 = true; - return promiseResolvedWith(void 0); - } - reading = true; - const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController); - if (byobRequest === null) { - pullWithDefaultReader(); - } else { - pullWithBYOBReader(byobRequest._view, false); - } + return true; + } + function WritableStreamAbort(stream, reason) { + var _a3; + if (stream._state === "closed" || stream._state === "errored") { return promiseResolvedWith(void 0); } - function pull2Algorithm() { - if (reading) { - readAgainForBranch2 = true; - return promiseResolvedWith(void 0); - } - reading = true; - const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController); - if (byobRequest === null) { - pullWithDefaultReader(); - } else { - pullWithBYOBReader(byobRequest._view, true); - } + stream._writableStreamController._abortReason = reason; + (_a3 = stream._writableStreamController._abortController) === null || _a3 === void 0 ? 
void 0 : _a3.abort(reason); + const state = stream._state; + if (state === "closed" || state === "errored") { return promiseResolvedWith(void 0); } - function cancel1Algorithm(reason) { - canceled1 = true; - reason1 = reason; - if (canceled2) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; + if (stream._pendingAbortRequest !== void 0) { + return stream._pendingAbortRequest._promise; } - function cancel2Algorithm(reason) { - canceled2 = true; - reason2 = reason; - if (canceled1) { - const compositeReason = CreateArrayFromList([reason1, reason2]); - const cancelResult = ReadableStreamCancel(stream, compositeReason); - resolveCancelPromise(cancelResult); - } - return cancelPromise; + let wasAlreadyErroring = false; + if (state === "erroring") { + wasAlreadyErroring = true; + reason = void 0; } - function startAlgorithm() { - return; + const promise = newPromise((resolve, reject) => { + stream._pendingAbortRequest = { + _promise: void 0, + _resolve: resolve, + _reject: reject, + _reason: reason, + _wasAlreadyErroring: wasAlreadyErroring + }; + }); + stream._pendingAbortRequest._promise = promise; + if (!wasAlreadyErroring) { + WritableStreamStartErroring(stream, reason); } - branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm); - branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm); - forwardReaderError(reader); - return [branch1, branch2]; + return promise; } - function isReadableStreamLike(stream) { - return typeIsObject(stream) && typeof stream.getReader !== "undefined"; + function WritableStreamClose(stream) { + const state = stream._state; + if (state === "closed" || state === "errored") { + return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`)); + } + const promise = newPromise((resolve, reject) => { + const closeRequest = { + _resolve: resolve, + _reject: reject + }; + stream._closeRequest = closeRequest; + }); + const writer = stream._writer; + if (writer !== void 0 && stream._backpressure && state === "writable") { + defaultWriterReadyPromiseResolve(writer); + } + WritableStreamDefaultControllerClose(stream._writableStreamController); + return promise; } - function ReadableStreamFrom(source) { - if (isReadableStreamLike(source)) { - return ReadableStreamFromDefaultReader(source.getReader()); + function WritableStreamAddWriteRequest(stream) { + const promise = newPromise((resolve, reject) => { + const writeRequest = { + _resolve: resolve, + _reject: reject + }; + stream._writeRequests.push(writeRequest); + }); + return promise; + } + function WritableStreamDealWithRejection(stream, error) { + const state = stream._state; + if (state === "writable") { + WritableStreamStartErroring(stream, error); + return; } - return ReadableStreamFromIterable(source); + WritableStreamFinishErroring(stream); } - function ReadableStreamFromIterable(asyncIterable) { - let stream; - const iteratorRecord = GetIterator(asyncIterable, "async"); - const startAlgorithm = noop3; - function pullAlgorithm() { - let nextResult; - try { - nextResult = IteratorNext(iteratorRecord); - } catch (e2) { - return promiseRejectedWith(e2); - } - const nextPromise = promiseResolvedWith(nextResult); - return transformPromiseWith(nextPromise, (iterResult) => { - if (!typeIsObject(iterResult)) { - throw new TypeError("The promise returned by 
the iterator.next() method must fulfill with an object"); - } - const done = IteratorComplete(iterResult); - if (done) { - ReadableStreamDefaultControllerClose(stream._readableStreamController); - } else { - const value = IteratorValue(iterResult); - ReadableStreamDefaultControllerEnqueue(stream._readableStreamController, value); - } - }); + function WritableStreamStartErroring(stream, reason) { + const controller = stream._writableStreamController; + stream._state = "erroring"; + stream._storedError = reason; + const writer = stream._writer; + if (writer !== void 0) { + WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); } - function cancelAlgorithm(reason) { - const iterator2 = iteratorRecord.iterator; - let returnMethod; - try { - returnMethod = GetMethod(iterator2, "return"); - } catch (e2) { - return promiseRejectedWith(e2); - } - if (returnMethod === void 0) { - return promiseResolvedWith(void 0); - } - let returnResult; - try { - returnResult = reflectCall(returnMethod, iterator2, [reason]); - } catch (e2) { - return promiseRejectedWith(e2); - } - const returnPromise = promiseResolvedWith(returnResult); - return transformPromiseWith(returnPromise, (iterResult) => { - if (!typeIsObject(iterResult)) { - throw new TypeError("The promise returned by the iterator.return() method must fulfill with an object"); - } - return void 0; - }); + if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) { + WritableStreamFinishErroring(stream); } - stream = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, 0); - return stream; } - function ReadableStreamFromDefaultReader(reader) { - let stream; - const startAlgorithm = noop3; - function pullAlgorithm() { - let readPromise; - try { - readPromise = reader.read(); - } catch (e2) { - return promiseRejectedWith(e2); - } - return transformPromiseWith(readPromise, (readResult) => { - if (!typeIsObject(readResult)) { - throw new TypeError("The promise returned by the reader.read() method must fulfill with an object"); - } - if (readResult.done) { - ReadableStreamDefaultControllerClose(stream._readableStreamController); - } else { - const value = readResult.value; - ReadableStreamDefaultControllerEnqueue(stream._readableStreamController, value); - } - }); + function WritableStreamFinishErroring(stream) { + stream._state = "errored"; + stream._writableStreamController[ErrorSteps](); + const storedError = stream._storedError; + stream._writeRequests.forEach((writeRequest) => { + writeRequest._reject(storedError); + }); + stream._writeRequests = new SimpleQueue(); + if (stream._pendingAbortRequest === void 0) { + WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; } - function cancelAlgorithm(reason) { - try { - return promiseResolvedWith(reader.cancel(reason)); - } catch (e2) { - return promiseRejectedWith(e2); - } + const abortRequest = stream._pendingAbortRequest; + stream._pendingAbortRequest = void 0; + if (abortRequest._wasAlreadyErroring) { + abortRequest._reject(storedError); + WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; } - stream = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, 0); - return stream; - } - function convertUnderlyingDefaultOrByteSource(source, context2) { - assertDictionary(source, context2); - const original = source; - const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize; - const cancel = original === null || original === void 0 ? 
void 0 : original.cancel; - const pull = original === null || original === void 0 ? void 0 : original.pull; - const start = original === null || original === void 0 ? void 0 : original.start; - const type = original === null || original === void 0 ? void 0 : original.type; - return { - autoAllocateChunkSize: autoAllocateChunkSize === void 0 ? void 0 : convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context2} has member 'autoAllocateChunkSize' that`), - cancel: cancel === void 0 ? void 0 : convertUnderlyingSourceCancelCallback(cancel, original, `${context2} has member 'cancel' that`), - pull: pull === void 0 ? void 0 : convertUnderlyingSourcePullCallback(pull, original, `${context2} has member 'pull' that`), - start: start === void 0 ? void 0 : convertUnderlyingSourceStartCallback(start, original, `${context2} has member 'start' that`), - type: type === void 0 ? void 0 : convertReadableStreamType(type, `${context2} has member 'type' that`) - }; - } - function convertUnderlyingSourceCancelCallback(fn, original, context2) { - assertFunction(fn, context2); - return (reason) => promiseCall(fn, original, [reason]); + const promise = stream._writableStreamController[AbortSteps](abortRequest._reason); + uponPromise(promise, () => { + abortRequest._resolve(); + WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return null; + }, (reason) => { + abortRequest._reject(reason); + WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return null; + }); } - function convertUnderlyingSourcePullCallback(fn, original, context2) { - assertFunction(fn, context2); - return (controller) => promiseCall(fn, original, [controller]); + function WritableStreamFinishInFlightWrite(stream) { + stream._inFlightWriteRequest._resolve(void 0); + stream._inFlightWriteRequest = void 0; } - function convertUnderlyingSourceStartCallback(fn, original, context2) { - assertFunction(fn, context2); - return (controller) => reflectCall(fn, original, [controller]); + function WritableStreamFinishInFlightWriteWithError(stream, error) { + stream._inFlightWriteRequest._reject(error); + stream._inFlightWriteRequest = void 0; + WritableStreamDealWithRejection(stream, error); } - function convertReadableStreamType(type, context2) { - type = `${type}`; - if (type !== "bytes") { - throw new TypeError(`${context2} '${type}' is not a valid enumeration value for ReadableStreamType`); + function WritableStreamFinishInFlightClose(stream) { + stream._inFlightCloseRequest._resolve(void 0); + stream._inFlightCloseRequest = void 0; + const state = stream._state; + if (state === "erroring") { + stream._storedError = void 0; + if (stream._pendingAbortRequest !== void 0) { + stream._pendingAbortRequest._resolve(); + stream._pendingAbortRequest = void 0; + } + } + stream._state = "closed"; + const writer = stream._writer; + if (writer !== void 0) { + defaultWriterClosedPromiseResolve(writer); } - return type; } - function convertIteratorOptions(options, context2) { - assertDictionary(options, context2); - const preventCancel = options === null || options === void 0 ? 
void 0 : options.preventCancel; - return { preventCancel: Boolean(preventCancel) }; + function WritableStreamFinishInFlightCloseWithError(stream, error) { + stream._inFlightCloseRequest._reject(error); + stream._inFlightCloseRequest = void 0; + if (stream._pendingAbortRequest !== void 0) { + stream._pendingAbortRequest._reject(error); + stream._pendingAbortRequest = void 0; + } + WritableStreamDealWithRejection(stream, error); } - function convertPipeOptions(options, context2) { - assertDictionary(options, context2); - const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort; - const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel; - const preventClose = options === null || options === void 0 ? void 0 : options.preventClose; - const signal = options === null || options === void 0 ? void 0 : options.signal; - if (signal !== void 0) { - assertAbortSignal(signal, `${context2} has member 'signal' that`); + function WritableStreamCloseQueuedOrInFlight(stream) { + if (stream._closeRequest === void 0 && stream._inFlightCloseRequest === void 0) { + return false; } - return { - preventAbort: Boolean(preventAbort), - preventCancel: Boolean(preventCancel), - preventClose: Boolean(preventClose), - signal - }; + return true; } - function assertAbortSignal(signal, context2) { - if (!isAbortSignal2(signal)) { - throw new TypeError(`${context2} is not an AbortSignal.`); + function WritableStreamHasOperationMarkedInFlight(stream) { + if (stream._inFlightWriteRequest === void 0 && stream._inFlightCloseRequest === void 0) { + return false; } + return true; } - function convertReadableWritablePair(pair, context2) { - assertDictionary(pair, context2); - const readable = pair === null || pair === void 0 ? void 0 : pair.readable; - assertRequiredField(readable, "readable", "ReadableWritablePair"); - assertReadableStream(readable, `${context2} has member 'readable' that`); - const writable = pair === null || pair === void 0 ? 
void 0 : pair.writable; - assertRequiredField(writable, "writable", "ReadableWritablePair"); - assertWritableStream(writable, `${context2} has member 'writable' that`); - return { readable, writable }; + function WritableStreamMarkCloseRequestInFlight(stream) { + stream._inFlightCloseRequest = stream._closeRequest; + stream._closeRequest = void 0; } - class ReadableStream2 { - constructor(rawUnderlyingSource = {}, rawStrategy = {}) { - if (rawUnderlyingSource === void 0) { - rawUnderlyingSource = null; + function WritableStreamMarkFirstWriteRequestInFlight(stream) { + stream._inFlightWriteRequest = stream._writeRequests.shift(); + } + function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { + if (stream._closeRequest !== void 0) { + stream._closeRequest._reject(stream._storedError); + stream._closeRequest = void 0; + } + const writer = stream._writer; + if (writer !== void 0) { + defaultWriterClosedPromiseReject(writer, stream._storedError); + } + } + function WritableStreamUpdateBackpressure(stream, backpressure) { + const writer = stream._writer; + if (writer !== void 0 && backpressure !== stream._backpressure) { + if (backpressure) { + defaultWriterReadyPromiseReset(writer); } else { - assertObject(rawUnderlyingSource, "First parameter"); + defaultWriterReadyPromiseResolve(writer); } - const strategy = convertQueuingStrategy(rawStrategy, "Second parameter"); - const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, "First parameter"); - InitializeReadableStream(this); - if (underlyingSource.type === "bytes") { - if (strategy.size !== void 0) { - throw new RangeError("The strategy for a byte stream cannot have a size function"); + } + stream._backpressure = backpressure; + } + class WritableStreamDefaultWriter { + constructor(stream) { + assertRequiredArgument(stream, 1, "WritableStreamDefaultWriter"); + assertWritableStream(stream, "First parameter"); + if (IsWritableStreamLocked(stream)) { + throw new TypeError("This stream has already been locked for exclusive writing by another writer"); + } + this._ownerWritableStream = stream; + stream._writer = this; + const state = stream._state; + if (state === "writable") { + if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) { + defaultWriterReadyPromiseInitialize(this); + } else { + defaultWriterReadyPromiseInitializeAsResolved(this); } - const highWaterMark = ExtractHighWaterMark(strategy, 0); - SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark); + defaultWriterClosedPromiseInitialize(this); + } else if (state === "erroring") { + defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError); + defaultWriterClosedPromiseInitialize(this); + } else if (state === "closed") { + defaultWriterReadyPromiseInitializeAsResolved(this); + defaultWriterClosedPromiseInitializeAsResolved(this); } else { - const sizeAlgorithm = ExtractSizeAlgorithm(strategy); - const highWaterMark = ExtractHighWaterMark(strategy, 1); - SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm); + const storedError = stream._storedError; + defaultWriterReadyPromiseInitializeAsRejected(this, storedError); + defaultWriterClosedPromiseInitializeAsRejected(this, storedError); } } /** - * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}. 
+ * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or + * the writer’s lock is released before the stream finishes closing. */ - get locked() { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1("locked"); + get closed() { + if (!IsWritableStreamDefaultWriter(this)) { + return promiseRejectedWith(defaultWriterBrandCheckException("closed")); } - return IsReadableStreamLocked(this); + return this._closedPromise; } /** - * Cancels the stream, signaling a loss of interest in the stream by a consumer. + * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full. + * A producer can use this information to determine the right amount of data to write. * - * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()} - * method, which might or might not use it. + * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort + * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when + * the writer’s lock is released. */ - cancel(reason = void 0) { - if (!IsReadableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$1("cancel")); + get desiredSize() { + if (!IsWritableStreamDefaultWriter(this)) { + throw defaultWriterBrandCheckException("desiredSize"); } - if (IsReadableStreamLocked(this)) { - return promiseRejectedWith(new TypeError("Cannot cancel a stream that already has a reader")); + if (this._ownerWritableStream === void 0) { + throw defaultWriterLockException("desiredSize"); } - return ReadableStreamCancel(this, reason); + return WritableStreamDefaultWriterGetDesiredSize(this); } - getReader(rawOptions = void 0) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1("getReader"); - } - const options = convertReaderOptions(rawOptions, "First parameter"); - if (options.mode === void 0) { - return AcquireReadableStreamDefaultReader(this); + /** + * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions + * from non-positive to positive, signaling that it is no longer applying backpressure. Once the desired size dips + * back to zero or below, the getter will return a new promise that stays pending until the next transition. + * + * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become + * rejected. + */ + get ready() { + if (!IsWritableStreamDefaultWriter(this)) { + return promiseRejectedWith(defaultWriterBrandCheckException("ready")); } - return AcquireReadableStreamBYOBReader(this); + return this._readyPromise; } - pipeThrough(rawTransform, rawOptions = {}) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1("pipeThrough"); - } - assertRequiredArgument(rawTransform, 1, "pipeThrough"); - const transform = convertReadableWritablePair(rawTransform, "First parameter"); - const options = convertPipeOptions(rawOptions, "Second parameter"); - if (IsReadableStreamLocked(this)) { - throw new TypeError("ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream"); + /** + * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}. 
+ */ + abort(reason = void 0) { + if (!IsWritableStreamDefaultWriter(this)) { + return promiseRejectedWith(defaultWriterBrandCheckException("abort")); } - if (IsWritableStreamLocked(transform.writable)) { - throw new TypeError("ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream"); + if (this._ownerWritableStream === void 0) { + return promiseRejectedWith(defaultWriterLockException("abort")); } - const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal); - setPromiseIsHandledToTrue(promise); - return transform.readable; + return WritableStreamDefaultWriterAbort(this, reason); } - pipeTo(destination, rawOptions = {}) { - if (!IsReadableStream(this)) { - return promiseRejectedWith(streamBrandCheckException$1("pipeTo")); - } - if (destination === void 0) { - return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`); - } - if (!IsWritableStream(destination)) { - return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`)); - } - let options; - try { - options = convertPipeOptions(rawOptions, "Second parameter"); - } catch (e2) { - return promiseRejectedWith(e2); + /** + * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}. + */ + close() { + if (!IsWritableStreamDefaultWriter(this)) { + return promiseRejectedWith(defaultWriterBrandCheckException("close")); } - if (IsReadableStreamLocked(this)) { - return promiseRejectedWith(new TypeError("ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream")); + const stream = this._ownerWritableStream; + if (stream === void 0) { + return promiseRejectedWith(defaultWriterLockException("close")); } - if (IsWritableStreamLocked(destination)) { - return promiseRejectedWith(new TypeError("ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream")); + if (WritableStreamCloseQueuedOrInFlight(stream)) { + return promiseRejectedWith(new TypeError("Cannot close an already-closing stream")); } - return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal); + return WritableStreamDefaultWriterClose(this); } /** - * Tees this readable stream, returning a two-element array containing the two resulting branches as - * new {@link ReadableStream} instances. - * - * Teeing a stream will lock it, preventing any other consumer from acquiring a reader. - * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be - * propagated to the stream's underlying source. + * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active. + * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from + * now on; otherwise, the writer will appear closed. * - * Note that the chunks seen in each branch will be the same object. If the chunks are not immutable, - * this could allow interference between the two branches. + * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. even if the + * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled). 
+ * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents + * other producers from writing in an interleaved manner. */ - tee() { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1("tee"); + releaseLock() { + if (!IsWritableStreamDefaultWriter(this)) { + throw defaultWriterBrandCheckException("releaseLock"); } - const branches = ReadableStreamTee(this); - return CreateArrayFromList(branches); - } - values(rawOptions = void 0) { - if (!IsReadableStream(this)) { - throw streamBrandCheckException$1("values"); + const stream = this._ownerWritableStream; + if (stream === void 0) { + return; } - const options = convertIteratorOptions(rawOptions, "First parameter"); - return AcquireReadableStreamAsyncIterator(this, options.preventCancel); - } - [SymbolAsyncIterator](options) { - return this.values(options); + WritableStreamDefaultWriterRelease(this); } - /** - * Creates a new ReadableStream wrapping the provided iterable or async iterable. - * - * This can be used to adapt various kinds of objects into a readable stream, - * such as an array, an async generator, or a Node.js readable stream. - */ - static from(asyncIterable) { - return ReadableStreamFrom(asyncIterable); + write(chunk = void 0) { + if (!IsWritableStreamDefaultWriter(this)) { + return promiseRejectedWith(defaultWriterBrandCheckException("write")); + } + if (this._ownerWritableStream === void 0) { + return promiseRejectedWith(defaultWriterLockException("write to")); + } + return WritableStreamDefaultWriterWrite(this, chunk); } } - Object.defineProperties(ReadableStream2, { - from: { enumerable: true } - }); - Object.defineProperties(ReadableStream2.prototype, { - cancel: { enumerable: true }, - getReader: { enumerable: true }, - pipeThrough: { enumerable: true }, - pipeTo: { enumerable: true }, - tee: { enumerable: true }, - values: { enumerable: true }, - locked: { enumerable: true } + Object.defineProperties(WritableStreamDefaultWriter.prototype, { + abort: { enumerable: true }, + close: { enumerable: true }, + releaseLock: { enumerable: true }, + write: { enumerable: true }, + closed: { enumerable: true }, + desiredSize: { enumerable: true }, + ready: { enumerable: true } }); - setFunctionName(ReadableStream2.from, "from"); - setFunctionName(ReadableStream2.prototype.cancel, "cancel"); - setFunctionName(ReadableStream2.prototype.getReader, "getReader"); - setFunctionName(ReadableStream2.prototype.pipeThrough, "pipeThrough"); - setFunctionName(ReadableStream2.prototype.pipeTo, "pipeTo"); - setFunctionName(ReadableStream2.prototype.tee, "tee"); - setFunctionName(ReadableStream2.prototype.values, "values"); + setFunctionName(WritableStreamDefaultWriter.prototype.abort, "abort"); + setFunctionName(WritableStreamDefaultWriter.prototype.close, "close"); + setFunctionName(WritableStreamDefaultWriter.prototype.releaseLock, "releaseLock"); + setFunctionName(WritableStreamDefaultWriter.prototype.write, "write"); if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ReadableStream2.prototype, Symbol.toStringTag, { - value: "ReadableStream", + Object.defineProperty(WritableStreamDefaultWriter.prototype, Symbol.toStringTag, { + value: "WritableStreamDefaultWriter", configurable: true }); } - Object.defineProperty(ReadableStream2.prototype, SymbolAsyncIterator, { - value: ReadableStream2.prototype.values, - writable: true, - configurable: true - }); - function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, 
sizeAlgorithm = () => 1) { - const stream = Object.create(ReadableStream2.prototype); - InitializeReadableStream(stream); - const controller = Object.create(ReadableStreamDefaultController.prototype); - SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); - return stream; - } - function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) { - const stream = Object.create(ReadableStream2.prototype); - InitializeReadableStream(stream); - const controller = Object.create(ReadableByteStreamController.prototype); - SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, void 0); - return stream; - } - function InitializeReadableStream(stream) { - stream._state = "readable"; - stream._reader = void 0; - stream._storedError = void 0; - stream._disturbed = false; - } - function IsReadableStream(x2) { + function IsWritableStreamDefaultWriter(x2) { if (!typeIsObject(x2)) { return false; } - if (!Object.prototype.hasOwnProperty.call(x2, "_readableStreamController")) { + if (!Object.prototype.hasOwnProperty.call(x2, "_ownerWritableStream")) { return false; } - return x2 instanceof ReadableStream2; + return x2 instanceof WritableStreamDefaultWriter; } - function IsReadableStreamLocked(stream) { - if (stream._reader === void 0) { - return false; - } - return true; + function WritableStreamDefaultWriterAbort(writer, reason) { + const stream = writer._ownerWritableStream; + return WritableStreamAbort(stream, reason); } - function ReadableStreamCancel(stream, reason) { - stream._disturbed = true; - if (stream._state === "closed") { + function WritableStreamDefaultWriterClose(writer) { + const stream = writer._ownerWritableStream; + return WritableStreamClose(stream); + } + function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) { + const stream = writer._ownerWritableStream; + const state = stream._state; + if (WritableStreamCloseQueuedOrInFlight(stream) || state === "closed") { return promiseResolvedWith(void 0); } - if (stream._state === "errored") { + if (state === "errored") { return promiseRejectedWith(stream._storedError); } - ReadableStreamClose(stream); - const reader = stream._reader; - if (reader !== void 0 && IsReadableStreamBYOBReader(reader)) { - const readIntoRequests = reader._readIntoRequests; - reader._readIntoRequests = new SimpleQueue(); - readIntoRequests.forEach((readIntoRequest) => { - readIntoRequest._closeSteps(void 0); - }); - } - const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason); - return transformPromiseWith(sourceCancelPromise, noop3); + return WritableStreamDefaultWriterClose(writer); } - function ReadableStreamClose(stream) { - stream._state = "closed"; - const reader = stream._reader; - if (reader === void 0) { - return; - } - defaultReaderClosedPromiseResolve(reader); - if (IsReadableStreamDefaultReader(reader)) { - const readRequests = reader._readRequests; - reader._readRequests = new SimpleQueue(); - readRequests.forEach((readRequest) => { - readRequest._closeSteps(); - }); + function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { + if (writer._closedPromiseState === "pending") { + defaultWriterClosedPromiseReject(writer, error); + } else { + defaultWriterClosedPromiseResetToRejected(writer, error); } } - function ReadableStreamError(stream, e2) { - stream._state = "errored"; - stream._storedError = e2; - const reader = stream._reader; - if (reader === 
void 0) { - return; - } - defaultReaderClosedPromiseReject(reader, e2); - if (IsReadableStreamDefaultReader(reader)) { - ReadableStreamDefaultReaderErrorReadRequests(reader, e2); + function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { + if (writer._readyPromiseState === "pending") { + defaultWriterReadyPromiseReject(writer, error); } else { - ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2); + defaultWriterReadyPromiseResetToRejected(writer, error); } } - function streamBrandCheckException$1(name) { - return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`); - } - function convertQueuingStrategyInit(init, context2) { - assertDictionary(init, context2); - const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; - assertRequiredField(highWaterMark, "highWaterMark", "QueuingStrategyInit"); - return { - highWaterMark: convertUnrestrictedDouble(highWaterMark) - }; - } - const byteLengthSizeFunction = (chunk) => { - return chunk.byteLength; - }; - setFunctionName(byteLengthSizeFunction, "size"); - class ByteLengthQueuingStrategy { - constructor(options) { - assertRequiredArgument(options, 1, "ByteLengthQueuingStrategy"); - options = convertQueuingStrategyInit(options, "First parameter"); - this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark; - } - /** - * Returns the high water mark provided to the constructor. - */ - get highWaterMark() { - if (!IsByteLengthQueuingStrategy(this)) { - throw byteLengthBrandCheckException("highWaterMark"); - } - return this._byteLengthQueuingStrategyHighWaterMark; + function WritableStreamDefaultWriterGetDesiredSize(writer) { + const stream = writer._ownerWritableStream; + const state = stream._state; + if (state === "errored" || state === "erroring") { + return null; } - /** - * Measures the size of `chunk` by returning the value of its `byteLength` property. 
- */ - get size() { - if (!IsByteLengthQueuingStrategy(this)) { - throw byteLengthBrandCheckException("size"); - } - return byteLengthSizeFunction; + if (state === "closed") { + return 0; } + return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController); } - Object.defineProperties(ByteLengthQueuingStrategy.prototype, { - highWaterMark: { enumerable: true }, - size: { enumerable: true } - }); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(ByteLengthQueuingStrategy.prototype, Symbol.toStringTag, { - value: "ByteLengthQueuingStrategy", - configurable: true - }); - } - function byteLengthBrandCheckException(name) { - return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`); + function WritableStreamDefaultWriterRelease(writer) { + const stream = writer._ownerWritableStream; + const releasedError = new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`); + WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); + WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); + stream._writer = void 0; + writer._ownerWritableStream = void 0; } - function IsByteLengthQueuingStrategy(x2) { - if (!typeIsObject(x2)) { - return false; + function WritableStreamDefaultWriterWrite(writer, chunk) { + const stream = writer._ownerWritableStream; + const controller = stream._writableStreamController; + const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk); + if (stream !== writer._ownerWritableStream) { + return promiseRejectedWith(defaultWriterLockException("write to")); } - if (!Object.prototype.hasOwnProperty.call(x2, "_byteLengthQueuingStrategyHighWaterMark")) { - return false; + const state = stream._state; + if (state === "errored") { + return promiseRejectedWith(stream._storedError); } - return x2 instanceof ByteLengthQueuingStrategy; + if (WritableStreamCloseQueuedOrInFlight(stream) || state === "closed") { + return promiseRejectedWith(new TypeError("The stream is closing or closed and cannot be written to")); + } + if (state === "erroring") { + return promiseRejectedWith(stream._storedError); + } + const promise = WritableStreamAddWriteRequest(stream); + WritableStreamDefaultControllerWrite(controller, chunk, chunkSize); + return promise; } - const countSizeFunction = () => { - return 1; - }; - setFunctionName(countSizeFunction, "size"); - class CountQueuingStrategy { - constructor(options) { - assertRequiredArgument(options, 1, "CountQueuingStrategy"); - options = convertQueuingStrategyInit(options, "First parameter"); - this._countQueuingStrategyHighWaterMark = options.highWaterMark; + const closeSentinel = {}; + class WritableStreamDefaultController { + constructor() { + throw new TypeError("Illegal constructor"); } /** - * Returns the high water mark provided to the constructor. + * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted. + * + * @deprecated + * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177. + * Use {@link WritableStreamDefaultController.signal}'s `reason` instead. 
*/ - get highWaterMark() { - if (!IsCountQueuingStrategy(this)) { - throw countBrandCheckException("highWaterMark"); + get abortReason() { + if (!IsWritableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$2("abortReason"); } - return this._countQueuingStrategyHighWaterMark; + return this._abortReason; } /** - * Measures the size of `chunk` by always returning 1. - * This ensures that the total queue size is a count of the number of chunks in the queue. + * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted. */ - get size() { - if (!IsCountQueuingStrategy(this)) { - throw countBrandCheckException("size"); - } - return countSizeFunction; - } - } - Object.defineProperties(CountQueuingStrategy.prototype, { - highWaterMark: { enumerable: true }, - size: { enumerable: true } - }); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(CountQueuingStrategy.prototype, Symbol.toStringTag, { - value: "CountQueuingStrategy", - configurable: true - }); - } - function countBrandCheckException(name) { - return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`); - } - function IsCountQueuingStrategy(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_countQueuingStrategyHighWaterMark")) { - return false; - } - return x2 instanceof CountQueuingStrategy; - } - function convertTransformer(original, context2) { - assertDictionary(original, context2); - const cancel = original === null || original === void 0 ? void 0 : original.cancel; - const flush = original === null || original === void 0 ? void 0 : original.flush; - const readableType = original === null || original === void 0 ? void 0 : original.readableType; - const start = original === null || original === void 0 ? void 0 : original.start; - const transform = original === null || original === void 0 ? void 0 : original.transform; - const writableType = original === null || original === void 0 ? void 0 : original.writableType; - return { - cancel: cancel === void 0 ? void 0 : convertTransformerCancelCallback(cancel, original, `${context2} has member 'cancel' that`), - flush: flush === void 0 ? void 0 : convertTransformerFlushCallback(flush, original, `${context2} has member 'flush' that`), - readableType, - start: start === void 0 ? void 0 : convertTransformerStartCallback(start, original, `${context2} has member 'start' that`), - transform: transform === void 0 ? 
void 0 : convertTransformerTransformCallback(transform, original, `${context2} has member 'transform' that`), - writableType - }; - } - function convertTransformerFlushCallback(fn, original, context2) { - assertFunction(fn, context2); - return (controller) => promiseCall(fn, original, [controller]); - } - function convertTransformerStartCallback(fn, original, context2) { - assertFunction(fn, context2); - return (controller) => reflectCall(fn, original, [controller]); - } - function convertTransformerTransformCallback(fn, original, context2) { - assertFunction(fn, context2); - return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); - } - function convertTransformerCancelCallback(fn, original, context2) { - assertFunction(fn, context2); - return (reason) => promiseCall(fn, original, [reason]); - } - class TransformStream2 { - constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) { - if (rawTransformer === void 0) { - rawTransformer = null; - } - const writableStrategy = convertQueuingStrategy(rawWritableStrategy, "Second parameter"); - const readableStrategy = convertQueuingStrategy(rawReadableStrategy, "Third parameter"); - const transformer = convertTransformer(rawTransformer, "First parameter"); - if (transformer.readableType !== void 0) { - throw new RangeError("Invalid readableType specified"); - } - if (transformer.writableType !== void 0) { - throw new RangeError("Invalid writableType specified"); + get signal() { + if (!IsWritableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$2("signal"); } - const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0); - const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy); - const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1); - const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy); - let startPromise_resolve; - const startPromise = newPromise((resolve) => { - startPromise_resolve = resolve; - }); - InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm); - SetUpTransformStreamDefaultControllerFromTransformer(this, transformer); - if (transformer.start !== void 0) { - startPromise_resolve(transformer.start(this._transformStreamController)); - } else { - startPromise_resolve(void 0); + if (this._abortController === void 0) { + throw new TypeError("WritableStreamDefaultController.prototype.signal is not supported"); } + return this._abortController.signal; } /** - * The readable side of the transform stream. + * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`. + * + * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying + * sink's methods. However, it can be useful for suddenly shutting down a stream in response to an event outside the + * normal lifecycle of interactions with the underlying sink. */ - get readable() { - if (!IsTransformStream(this)) { - throw streamBrandCheckException("readable"); + error(e2 = void 0) { + if (!IsWritableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$2("error"); } - return this._readable; - } - /** - * The writable side of the transform stream. 
- */ - get writable() { - if (!IsTransformStream(this)) { - throw streamBrandCheckException("writable"); + const state = this._controlledWritableStream._state; + if (state !== "writable") { + return; } - return this._writable; + WritableStreamDefaultControllerError(this, e2); + } + /** @internal */ + [AbortSteps](reason) { + const result = this._abortAlgorithm(reason); + WritableStreamDefaultControllerClearAlgorithms(this); + return result; + } + /** @internal */ + [ErrorSteps]() { + ResetQueue(this); } } - Object.defineProperties(TransformStream2.prototype, { - readable: { enumerable: true }, - writable: { enumerable: true } + Object.defineProperties(WritableStreamDefaultController.prototype, { + abortReason: { enumerable: true }, + signal: { enumerable: true }, + error: { enumerable: true } }); if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(TransformStream2.prototype, Symbol.toStringTag, { - value: "TransformStream", + Object.defineProperty(WritableStreamDefaultController.prototype, Symbol.toStringTag, { + value: "WritableStreamDefaultController", configurable: true }); } - function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) { - function startAlgorithm() { - return startPromise; - } - function writeAlgorithm(chunk) { - return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk); - } - function abortAlgorithm(reason) { - return TransformStreamDefaultSinkAbortAlgorithm(stream, reason); - } - function closeAlgorithm() { - return TransformStreamDefaultSinkCloseAlgorithm(stream); - } - stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm); - function pullAlgorithm() { - return TransformStreamDefaultSourcePullAlgorithm(stream); - } - function cancelAlgorithm(reason) { - return TransformStreamDefaultSourceCancelAlgorithm(stream, reason); - } - stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm); - stream._backpressure = void 0; - stream._backpressureChangePromise = void 0; - stream._backpressureChangePromise_resolve = void 0; - TransformStreamSetBackpressure(stream, true); - stream._transformStreamController = void 0; - } - function IsTransformStream(x2) { + function IsWritableStreamDefaultController(x2) { if (!typeIsObject(x2)) { return false; } - if (!Object.prototype.hasOwnProperty.call(x2, "_transformStreamController")) { + if (!Object.prototype.hasOwnProperty.call(x2, "_controlledWritableStream")) { return false; } - return x2 instanceof TransformStream2; - } - function TransformStreamError(stream, e2) { - ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e2); - TransformStreamErrorWritableAndUnblockWrite(stream, e2); - } - function TransformStreamErrorWritableAndUnblockWrite(stream, e2) { - TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController); - WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e2); - TransformStreamUnblockWrite(stream); - } - function TransformStreamUnblockWrite(stream) { - if (stream._backpressure) { - TransformStreamSetBackpressure(stream, false); - } + return x2 instanceof WritableStreamDefaultController; } - function TransformStreamSetBackpressure(stream, backpressure) { - if (stream._backpressureChangePromise !== void 0) { - stream._backpressureChangePromise_resolve(); - } 
- stream._backpressureChangePromise = newPromise((resolve) => { - stream._backpressureChangePromise_resolve = resolve; + function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) { + controller._controlledWritableStream = stream; + stream._writableStreamController = controller; + controller._queue = void 0; + controller._queueTotalSize = void 0; + ResetQueue(controller); + controller._abortReason = void 0; + controller._abortController = createAbortController(); + controller._started = false; + controller._strategySizeAlgorithm = sizeAlgorithm; + controller._strategyHWM = highWaterMark; + controller._writeAlgorithm = writeAlgorithm; + controller._closeAlgorithm = closeAlgorithm; + controller._abortAlgorithm = abortAlgorithm; + const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); + WritableStreamUpdateBackpressure(stream, backpressure); + const startResult = startAlgorithm(); + const startPromise = promiseResolvedWith(startResult); + uponPromise(startPromise, () => { + controller._started = true; + WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + return null; + }, (r2) => { + controller._started = true; + WritableStreamDealWithRejection(stream, r2); + return null; }); - stream._backpressure = backpressure; } - class TransformStreamDefaultController { - constructor() { - throw new TypeError("Illegal constructor"); - } - /** - * Returns the desired size to fill the readable side’s internal queue. It can be negative, if the queue is over-full. - */ - get desiredSize() { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException("desiredSize"); - } - const readableController = this._controlledTransformStream._readable._readableStreamController; - return ReadableStreamDefaultControllerGetDesiredSize(readableController); + function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) { + const controller = Object.create(WritableStreamDefaultController.prototype); + let startAlgorithm; + let writeAlgorithm; + let closeAlgorithm; + let abortAlgorithm; + if (underlyingSink.start !== void 0) { + startAlgorithm = () => underlyingSink.start(controller); + } else { + startAlgorithm = () => void 0; } - enqueue(chunk = void 0) { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException("enqueue"); - } - TransformStreamDefaultControllerEnqueue(this, chunk); + if (underlyingSink.write !== void 0) { + writeAlgorithm = (chunk) => underlyingSink.write(chunk, controller); + } else { + writeAlgorithm = () => promiseResolvedWith(void 0); } - /** - * Errors both the readable side and the writable side of the controlled transform stream, making all future - * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded. - */ - error(reason = void 0) { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException("error"); - } - TransformStreamDefaultControllerError(this, reason); + if (underlyingSink.close !== void 0) { + closeAlgorithm = () => underlyingSink.close(); + } else { + closeAlgorithm = () => promiseResolvedWith(void 0); } - /** - * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the - * transformer only needs to consume a portion of the chunks written to the writable side. 
- */ - terminate() { - if (!IsTransformStreamDefaultController(this)) { - throw defaultControllerBrandCheckException("terminate"); - } - TransformStreamDefaultControllerTerminate(this); + if (underlyingSink.abort !== void 0) { + abortAlgorithm = (reason) => underlyingSink.abort(reason); + } else { + abortAlgorithm = () => promiseResolvedWith(void 0); } + SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm); } - Object.defineProperties(TransformStreamDefaultController.prototype, { - enqueue: { enumerable: true }, - error: { enumerable: true }, - terminate: { enumerable: true }, - desiredSize: { enumerable: true } - }); - setFunctionName(TransformStreamDefaultController.prototype.enqueue, "enqueue"); - setFunctionName(TransformStreamDefaultController.prototype.error, "error"); - setFunctionName(TransformStreamDefaultController.prototype.terminate, "terminate"); - if (typeof Symbol.toStringTag === "symbol") { - Object.defineProperty(TransformStreamDefaultController.prototype, Symbol.toStringTag, { - value: "TransformStreamDefaultController", - configurable: true - }); + function WritableStreamDefaultControllerClearAlgorithms(controller) { + controller._writeAlgorithm = void 0; + controller._closeAlgorithm = void 0; + controller._abortAlgorithm = void 0; + controller._strategySizeAlgorithm = void 0; } - function IsTransformStreamDefaultController(x2) { - if (!typeIsObject(x2)) { - return false; - } - if (!Object.prototype.hasOwnProperty.call(x2, "_controlledTransformStream")) { - return false; + function WritableStreamDefaultControllerClose(controller) { + EnqueueValueWithSize(controller, closeSentinel, 0); + WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + } + function WritableStreamDefaultControllerGetChunkSize(controller, chunk) { + try { + return controller._strategySizeAlgorithm(chunk); + } catch (chunkSizeE) { + WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE); + return 1; } - return x2 instanceof TransformStreamDefaultController; } - function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm, cancelAlgorithm) { - controller._controlledTransformStream = stream; - stream._transformStreamController = controller; - controller._transformAlgorithm = transformAlgorithm; - controller._flushAlgorithm = flushAlgorithm; - controller._cancelAlgorithm = cancelAlgorithm; - controller._finishPromise = void 0; - controller._finishPromise_resolve = void 0; - controller._finishPromise_reject = void 0; + function WritableStreamDefaultControllerGetDesiredSize(controller) { + return controller._strategyHWM - controller._queueTotalSize; } - function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) { - const controller = Object.create(TransformStreamDefaultController.prototype); - let transformAlgorithm; - let flushAlgorithm; - let cancelAlgorithm; - if (transformer.transform !== void 0) { - transformAlgorithm = (chunk) => transformer.transform(chunk, controller); - } else { - transformAlgorithm = (chunk) => { - try { - TransformStreamDefaultControllerEnqueue(controller, chunk); - return promiseResolvedWith(void 0); - } catch (transformResultE) { - return promiseRejectedWith(transformResultE); - } - }; - } - if (transformer.flush !== void 0) { - flushAlgorithm = () => transformer.flush(controller); - } else { - flushAlgorithm = () => promiseResolvedWith(void 0); + function 
WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) { + try { + EnqueueValueWithSize(controller, chunk, chunkSize); + } catch (enqueueE) { + WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE); + return; } - if (transformer.cancel !== void 0) { - cancelAlgorithm = (reason) => transformer.cancel(reason); - } else { - cancelAlgorithm = () => promiseResolvedWith(void 0); + const stream = controller._controlledWritableStream; + if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === "writable") { + const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); + WritableStreamUpdateBackpressure(stream, backpressure); } - SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm, cancelAlgorithm); - } - function TransformStreamDefaultControllerClearAlgorithms(controller) { - controller._transformAlgorithm = void 0; - controller._flushAlgorithm = void 0; - controller._cancelAlgorithm = void 0; + WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); } - function TransformStreamDefaultControllerEnqueue(controller, chunk) { - const stream = controller._controlledTransformStream; - const readableController = stream._readable._readableStreamController; - if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { - throw new TypeError("Readable side is not in a state that permits enqueue"); + function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { + const stream = controller._controlledWritableStream; + if (!controller._started) { + return; } - try { - ReadableStreamDefaultControllerEnqueue(readableController, chunk); - } catch (e2) { - TransformStreamErrorWritableAndUnblockWrite(stream, e2); - throw stream._readable._storedError; + if (stream._inFlightWriteRequest !== void 0) { + return; } - const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController); - if (backpressure !== stream._backpressure) { - TransformStreamSetBackpressure(stream, true); + const state = stream._state; + if (state === "erroring") { + WritableStreamFinishErroring(stream); + return; + } + if (controller._queue.length === 0) { + return; + } + const value = PeekQueueValue(controller); + if (value === closeSentinel) { + WritableStreamDefaultControllerProcessClose(controller); + } else { + WritableStreamDefaultControllerProcessWrite(controller, value); } } - function TransformStreamDefaultControllerError(controller, e2) { - TransformStreamError(controller._controlledTransformStream, e2); + function WritableStreamDefaultControllerErrorIfNeeded(controller, error) { + if (controller._controlledWritableStream._state === "writable") { + WritableStreamDefaultControllerError(controller, error); + } } - function TransformStreamDefaultControllerPerformTransform(controller, chunk) { - const transformPromise = controller._transformAlgorithm(chunk); - return transformPromiseWith(transformPromise, void 0, (r2) => { - TransformStreamError(controller._controlledTransformStream, r2); - throw r2; + function WritableStreamDefaultControllerProcessClose(controller) { + const stream = controller._controlledWritableStream; + WritableStreamMarkCloseRequestInFlight(stream); + DequeueValue(controller); + const sinkClosePromise = controller._closeAlgorithm(); + WritableStreamDefaultControllerClearAlgorithms(controller); + uponPromise(sinkClosePromise, () => { + WritableStreamFinishInFlightClose(stream); + return null; + }, (reason) => { + WritableStreamFinishInFlightCloseWithError(stream, 
reason); + return null; }); } - function TransformStreamDefaultControllerTerminate(controller) { - const stream = controller._controlledTransformStream; - const readableController = stream._readable._readableStreamController; - ReadableStreamDefaultControllerClose(readableController); - const error = new TypeError("TransformStream terminated"); - TransformStreamErrorWritableAndUnblockWrite(stream, error); - } - function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) { - const controller = stream._transformStreamController; - if (stream._backpressure) { - const backpressureChangePromise = stream._backpressureChangePromise; - return transformPromiseWith(backpressureChangePromise, () => { - const writable = stream._writable; - const state = writable._state; - if (state === "erroring") { - throw writable._storedError; - } - return TransformStreamDefaultControllerPerformTransform(controller, chunk); - }); - } - return TransformStreamDefaultControllerPerformTransform(controller, chunk); - } - function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) { - const controller = stream._transformStreamController; - if (controller._finishPromise !== void 0) { - return controller._finishPromise; - } - const readable = stream._readable; - controller._finishPromise = newPromise((resolve, reject) => { - controller._finishPromise_resolve = resolve; - controller._finishPromise_reject = reject; - }); - const cancelPromise = controller._cancelAlgorithm(reason); - TransformStreamDefaultControllerClearAlgorithms(controller); - uponPromise(cancelPromise, () => { - if (readable._state === "errored") { - defaultControllerFinishPromiseReject(controller, readable._storedError); - } else { - ReadableStreamDefaultControllerError(readable._readableStreamController, reason); - defaultControllerFinishPromiseResolve(controller); + function WritableStreamDefaultControllerProcessWrite(controller, chunk) { + const stream = controller._controlledWritableStream; + WritableStreamMarkFirstWriteRequestInFlight(stream); + const sinkWritePromise = controller._writeAlgorithm(chunk); + uponPromise(sinkWritePromise, () => { + WritableStreamFinishInFlightWrite(stream); + const state = stream._state; + DequeueValue(controller); + if (!WritableStreamCloseQueuedOrInFlight(stream) && state === "writable") { + const backpressure = WritableStreamDefaultControllerGetBackpressure(controller); + WritableStreamUpdateBackpressure(stream, backpressure); } + WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller); return null; - }, (r2) => { - ReadableStreamDefaultControllerError(readable._readableStreamController, r2); - defaultControllerFinishPromiseReject(controller, r2); - return null; - }); - return controller._finishPromise; - } - function TransformStreamDefaultSinkCloseAlgorithm(stream) { - const controller = stream._transformStreamController; - if (controller._finishPromise !== void 0) { - return controller._finishPromise; - } - const readable = stream._readable; - controller._finishPromise = newPromise((resolve, reject) => { - controller._finishPromise_resolve = resolve; - controller._finishPromise_reject = reject; - }); - const flushPromise = controller._flushAlgorithm(); - TransformStreamDefaultControllerClearAlgorithms(controller); - uponPromise(flushPromise, () => { - if (readable._state === "errored") { - defaultControllerFinishPromiseReject(controller, readable._storedError); - } else { - ReadableStreamDefaultControllerClose(readable._readableStreamController); - 
defaultControllerFinishPromiseResolve(controller); + }, (reason) => { + if (stream._state === "writable") { + WritableStreamDefaultControllerClearAlgorithms(controller); } - return null; - }, (r2) => { - ReadableStreamDefaultControllerError(readable._readableStreamController, r2); - defaultControllerFinishPromiseReject(controller, r2); + WritableStreamFinishInFlightWriteWithError(stream, reason); return null; }); - return controller._finishPromise; } - function TransformStreamDefaultSourcePullAlgorithm(stream) { - TransformStreamSetBackpressure(stream, false); - return stream._backpressureChangePromise; + function WritableStreamDefaultControllerGetBackpressure(controller) { + const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller); + return desiredSize <= 0; } - function TransformStreamDefaultSourceCancelAlgorithm(stream, reason) { - const controller = stream._transformStreamController; - if (controller._finishPromise !== void 0) { - return controller._finishPromise; - } - const writable = stream._writable; - controller._finishPromise = newPromise((resolve, reject) => { - controller._finishPromise_resolve = resolve; - controller._finishPromise_reject = reject; - }); - const cancelPromise = controller._cancelAlgorithm(reason); - TransformStreamDefaultControllerClearAlgorithms(controller); - uponPromise(cancelPromise, () => { - if (writable._state === "errored") { - defaultControllerFinishPromiseReject(controller, writable._storedError); - } else { - WritableStreamDefaultControllerErrorIfNeeded(writable._writableStreamController, reason); - TransformStreamUnblockWrite(stream); - defaultControllerFinishPromiseResolve(controller); - } - return null; - }, (r2) => { - WritableStreamDefaultControllerErrorIfNeeded(writable._writableStreamController, r2); - TransformStreamUnblockWrite(stream); - defaultControllerFinishPromiseReject(controller, r2); - return null; + function WritableStreamDefaultControllerError(controller, error) { + const stream = controller._controlledWritableStream; + WritableStreamDefaultControllerClearAlgorithms(controller); + WritableStreamStartErroring(stream, error); + } + function streamBrandCheckException$2(name) { + return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`); + } + function defaultControllerBrandCheckException$2(name) { + return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`); + } + function defaultWriterBrandCheckException(name) { + return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`); + } + function defaultWriterLockException(name) { + return new TypeError("Cannot " + name + " a stream using a released writer"); + } + function defaultWriterClosedPromiseInitialize(writer) { + writer._closedPromise = newPromise((resolve, reject) => { + writer._closedPromise_resolve = resolve; + writer._closedPromise_reject = reject; + writer._closedPromiseState = "pending"; }); - return controller._finishPromise; } - function defaultControllerBrandCheckException(name) { - return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`); + function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) { + defaultWriterClosedPromiseInitialize(writer); + defaultWriterClosedPromiseReject(writer, reason); } - function defaultControllerFinishPromiseResolve(controller) { - if (controller._finishPromise_resolve === 
void 0) { - return; - } - controller._finishPromise_resolve(); - controller._finishPromise_resolve = void 0; - controller._finishPromise_reject = void 0; + function defaultWriterClosedPromiseInitializeAsResolved(writer) { + defaultWriterClosedPromiseInitialize(writer); + defaultWriterClosedPromiseResolve(writer); } - function defaultControllerFinishPromiseReject(controller, reason) { - if (controller._finishPromise_reject === void 0) { + function defaultWriterClosedPromiseReject(writer, reason) { + if (writer._closedPromise_reject === void 0) { return; } - setPromiseIsHandledToTrue(controller._finishPromise); - controller._finishPromise_reject(reason); - controller._finishPromise_resolve = void 0; - controller._finishPromise_reject = void 0; + setPromiseIsHandledToTrue(writer._closedPromise); + writer._closedPromise_reject(reason); + writer._closedPromise_resolve = void 0; + writer._closedPromise_reject = void 0; + writer._closedPromiseState = "rejected"; } - function streamBrandCheckException(name) { - return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`); + function defaultWriterClosedPromiseResetToRejected(writer, reason) { + defaultWriterClosedPromiseInitializeAsRejected(writer, reason); } - exports3.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; - exports3.CountQueuingStrategy = CountQueuingStrategy; - exports3.ReadableByteStreamController = ReadableByteStreamController; - exports3.ReadableStream = ReadableStream2; - exports3.ReadableStreamBYOBReader = ReadableStreamBYOBReader; - exports3.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; - exports3.ReadableStreamDefaultController = ReadableStreamDefaultController; - exports3.ReadableStreamDefaultReader = ReadableStreamDefaultReader; - exports3.TransformStream = TransformStream2; - exports3.TransformStreamDefaultController = TransformStreamDefaultController; - exports3.WritableStream = WritableStream; - exports3.WritableStreamDefaultController = WritableStreamDefaultController; - exports3.WritableStreamDefaultWriter = WritableStreamDefaultWriter; - }); - } -}); - -// node_modules/fetch-blob/streams.cjs -var require_streams = __commonJS({ - "node_modules/fetch-blob/streams.cjs"() { - "use strict"; - var POOL_SIZE2 = 65536; - if (!globalThis.ReadableStream) { - try { - const process3 = require("process"); - const { emitWarning } = process3; - try { - process3.emitWarning = () => { - }; - Object.assign(globalThis, require("stream/web")); - process3.emitWarning = emitWarning; - } catch (error) { - process3.emitWarning = emitWarning; - throw error; + function defaultWriterClosedPromiseResolve(writer) { + if (writer._closedPromise_resolve === void 0) { + return; } - } catch (error) { - Object.assign(globalThis, require_ponyfill_es2018()); + writer._closedPromise_resolve(void 0); + writer._closedPromise_resolve = void 0; + writer._closedPromise_reject = void 0; + writer._closedPromiseState = "resolved"; } - } - try { - const { Blob: Blob4 } = require("buffer"); - if (Blob4 && !Blob4.prototype.stream) { - Blob4.prototype.stream = function name(params) { - let position = 0; - const blob = this; - return new ReadableStream({ - type: "bytes", - async pull(ctrl) { - const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE2)); - const buffer = await chunk.arrayBuffer(); - position += buffer.byteLength; - ctrl.enqueue(new Uint8Array(buffer)); - if (position === blob.size) { - ctrl.close(); - } - } - }); - }; + function defaultWriterReadyPromiseInitialize(writer) { + 
writer._readyPromise = newPromise((resolve, reject) => { + writer._readyPromise_resolve = resolve; + writer._readyPromise_reject = reject; + }); + writer._readyPromiseState = "pending"; } - } catch (error) { - } - } -}); - -// node_modules/fetch-blob/index.js -async function* toIterator(parts, clone2 = true) { - for (const part of parts) { - if ("stream" in part) { - yield* ( - /** @type {AsyncIterableIterator} */ - part.stream() - ); - } else if (ArrayBuffer.isView(part)) { - if (clone2) { - let position = part.byteOffset; - const end = part.byteOffset + part.byteLength; - while (position !== end) { - const size = Math.min(end - position, POOL_SIZE); - const chunk = part.buffer.slice(position, position + size); - position += chunk.byteLength; - yield new Uint8Array(chunk); - } - } else { - yield part; + function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) { + defaultWriterReadyPromiseInitialize(writer); + defaultWriterReadyPromiseReject(writer, reason); } - } else { - let position = 0, b = ( - /** @type {Blob} */ - part - ); - while (position !== b.size) { - const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE)); - const buffer = await chunk.arrayBuffer(); - position += buffer.byteLength; - yield new Uint8Array(buffer); + function defaultWriterReadyPromiseInitializeAsResolved(writer) { + defaultWriterReadyPromiseInitialize(writer); + defaultWriterReadyPromiseResolve(writer); } - } - } -} -var import_streams, POOL_SIZE, _Blob, Blob3, fetch_blob_default; -var init_fetch_blob = __esm({ - "node_modules/fetch-blob/index.js"() { - "use strict"; - import_streams = __toESM(require_streams(), 1); - POOL_SIZE = 65536; - _Blob = class Blob2 { - /** @type {Array.<(Blob|Uint8Array)>} */ - #parts = []; - #type = ""; - #size = 0; - #endings = "transparent"; - /** - * The Blob() constructor returns a new Blob object. The content - * of the blob consists of the concatenation of the values given - * in the parameter array. - * - * @param {*} blobParts - * @param {{ type?: string, endings?: string }} [options] - */ - constructor(blobParts = [], options = {}) { - if (typeof blobParts !== "object" || blobParts === null) { - throw new TypeError("Failed to construct 'Blob': The provided value cannot be converted to a sequence."); - } - if (typeof blobParts[Symbol.iterator] !== "function") { - throw new TypeError("Failed to construct 'Blob': The object must have a callable @@iterator property."); - } - if (typeof options !== "object" && typeof options !== "function") { - throw new TypeError("Failed to construct 'Blob': parameter 2 cannot convert to dictionary."); - } - if (options === null) - options = {}; - const encoder = new TextEncoder(); - for (const element of blobParts) { - let part; - if (ArrayBuffer.isView(element)) { - part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength)); - } else if (element instanceof ArrayBuffer) { - part = new Uint8Array(element.slice(0)); - } else if (element instanceof Blob2) { - part = element; - } else { - part = encoder.encode(`${element}`); - } - this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size; - this.#parts.push(part); + function defaultWriterReadyPromiseReject(writer, reason) { + if (writer._readyPromise_reject === void 0) { + return; } - this.#endings = `${options.endings === void 0 ? "transparent" : options.endings}`; - const type = options.type === void 0 ? "" : String(options.type); - this.#type = /^[\x20-\x7E]*$/.test(type) ? 
type : ""; + setPromiseIsHandledToTrue(writer._readyPromise); + writer._readyPromise_reject(reason); + writer._readyPromise_resolve = void 0; + writer._readyPromise_reject = void 0; + writer._readyPromiseState = "rejected"; } - /** - * The Blob interface's size property returns the - * size of the Blob in bytes. - */ - get size() { - return this.#size; + function defaultWriterReadyPromiseReset(writer) { + defaultWriterReadyPromiseInitialize(writer); } - /** - * The type property of a Blob object returns the MIME type of the file. - */ - get type() { - return this.#type; + function defaultWriterReadyPromiseResetToRejected(writer, reason) { + defaultWriterReadyPromiseInitializeAsRejected(writer, reason); } - /** - * The text() method in the Blob interface returns a Promise - * that resolves with a string containing the contents of - * the blob, interpreted as UTF-8. - * - * @return {Promise} - */ - async text() { - const decoder = new TextDecoder(); - let str = ""; - for await (const part of toIterator(this.#parts, false)) { - str += decoder.decode(part, { stream: true }); + function defaultWriterReadyPromiseResolve(writer) { + if (writer._readyPromise_resolve === void 0) { + return; } - str += decoder.decode(); - return str; + writer._readyPromise_resolve(void 0); + writer._readyPromise_resolve = void 0; + writer._readyPromise_reject = void 0; + writer._readyPromiseState = "fulfilled"; } - /** - * The arrayBuffer() method in the Blob interface returns a - * Promise that resolves with the contents of the blob as - * binary data contained in an ArrayBuffer. - * - * @return {Promise} - */ - async arrayBuffer() { - const data = new Uint8Array(this.size); - let offset = 0; - for await (const chunk of toIterator(this.#parts, false)) { - data.set(chunk, offset); - offset += chunk.length; + function getGlobals() { + if (typeof globalThis !== "undefined") { + return globalThis; + } else if (typeof self !== "undefined") { + return self; + } else if (typeof global !== "undefined") { + return global; } - return data.buffer; - } - stream() { - const it = toIterator(this.#parts, true); - return new globalThis.ReadableStream({ - // @ts-ignore - type: "bytes", - async pull(ctrl) { - const chunk = await it.next(); - chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value); - }, - async cancel() { - await it.return(); - } - }); + return void 0; } - /** - * The Blob interface's slice() method creates and returns a - * new Blob object which contains data from a subset of the - * blob on which it's called. - * - * @param {number} [start] - * @param {number} [end] - * @param {string} [type] - */ - slice(start = 0, end = this.size, type = "") { - const { size } = this; - let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size); - let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size); - const span = Math.max(relativeEnd - relativeStart, 0); - const parts = this.#parts; - const blobParts = []; - let added = 0; - for (const part of parts) { - if (added >= span) { - break; - } - const size2 = ArrayBuffer.isView(part) ? 
part.byteLength : part.size; - if (relativeStart && size2 <= relativeStart) { - relativeStart -= size2; - relativeEnd -= size2; - } else { - let chunk; - if (ArrayBuffer.isView(part)) { - chunk = part.subarray(relativeStart, Math.min(size2, relativeEnd)); - added += chunk.byteLength; - } else { - chunk = part.slice(relativeStart, Math.min(size2, relativeEnd)); - added += chunk.size; - } - relativeEnd -= size2; - blobParts.push(chunk); - relativeStart = 0; - } + const globals = getGlobals(); + function isDOMExceptionConstructor(ctor) { + if (!(typeof ctor === "function" || typeof ctor === "object")) { + return false; } - const blob = new Blob2([], { type: String(type).toLowerCase() }); - blob.#size = span; - blob.#parts = blobParts; - return blob; - } - get [Symbol.toStringTag]() { - return "Blob"; - } - static [Symbol.hasInstance](object) { - return object && typeof object === "object" && typeof object.constructor === "function" && (typeof object.stream === "function" || typeof object.arrayBuffer === "function") && /^(Blob|File)$/.test(object[Symbol.toStringTag]); - } - }; - Object.defineProperties(_Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } - }); - Blob3 = _Blob; - fetch_blob_default = Blob3; - } -}); - -// node_modules/fetch-blob/file.js -var _File, File2, file_default; -var init_file = __esm({ - "node_modules/fetch-blob/file.js"() { - "use strict"; - init_fetch_blob(); - _File = class File extends fetch_blob_default { - #lastModified = 0; - #name = ""; - /** - * @param {*[]} fileBits - * @param {string} fileName - * @param {{lastModified?: number, type?: string}} options - */ - // @ts-ignore - constructor(fileBits, fileName, options = {}) { - if (arguments.length < 2) { - throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`); + if (ctor.name !== "DOMException") { + return false; } - super(fileBits, options); - if (options === null) - options = {}; - const lastModified = options.lastModified === void 0 ? Date.now() : Number(options.lastModified); - if (!Number.isNaN(lastModified)) { - this.#lastModified = lastModified; + try { + new ctor(); + return true; + } catch (_a3) { + return false; } - this.#name = String(fileName); } - get name() { - return this.#name; + function getFromGlobal() { + const ctor = globals === null || globals === void 0 ? void 0 : globals.DOMException; + return isDOMExceptionConstructor(ctor) ? 
ctor : void 0; } - get lastModified() { - return this.#lastModified; + function createPolyfill() { + const ctor = function DOMException4(message, name) { + this.message = message || ""; + this.name = name || "Error"; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + }; + setFunctionName(ctor, "DOMException"); + ctor.prototype = Object.create(Error.prototype); + Object.defineProperty(ctor.prototype, "constructor", { value: ctor, writable: true, configurable: true }); + return ctor; } - get [Symbol.toStringTag]() { - return "File"; + const DOMException3 = getFromGlobal() || createPolyfill(); + function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) { + const reader = AcquireReadableStreamDefaultReader(source); + const writer = AcquireWritableStreamDefaultWriter(dest); + source._disturbed = true; + let shuttingDown = false; + let currentWrite = promiseResolvedWith(void 0); + return newPromise((resolve, reject) => { + let abortAlgorithm; + if (signal !== void 0) { + abortAlgorithm = () => { + const error = signal.reason !== void 0 ? signal.reason : new DOMException3("Aborted", "AbortError"); + const actions = []; + if (!preventAbort) { + actions.push(() => { + if (dest._state === "writable") { + return WritableStreamAbort(dest, error); + } + return promiseResolvedWith(void 0); + }); + } + if (!preventCancel) { + actions.push(() => { + if (source._state === "readable") { + return ReadableStreamCancel(source, error); + } + return promiseResolvedWith(void 0); + }); + } + shutdownWithAction(() => Promise.all(actions.map((action) => action())), true, error); + }; + if (signal.aborted) { + abortAlgorithm(); + return; + } + signal.addEventListener("abort", abortAlgorithm); + } + function pipeLoop() { + return newPromise((resolveLoop, rejectLoop) => { + function next(done) { + if (done) { + resolveLoop(); + } else { + PerformPromiseThen(pipeStep(), next, rejectLoop); + } + } + next(false); + }); + } + function pipeStep() { + if (shuttingDown) { + return promiseResolvedWith(true); + } + return PerformPromiseThen(writer._readyPromise, () => { + return newPromise((resolveRead, rejectRead) => { + ReadableStreamDefaultReaderRead(reader, { + _chunkSteps: (chunk) => { + currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), void 0, noop4); + resolveRead(false); + }, + _closeSteps: () => resolveRead(true), + _errorSteps: rejectRead + }); + }); + }); + } + isOrBecomesErrored(source, reader._closedPromise, (storedError) => { + if (!preventAbort) { + shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError); + } else { + shutdown(true, storedError); + } + return null; + }); + isOrBecomesErrored(dest, writer._closedPromise, (storedError) => { + if (!preventCancel) { + shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError); + } else { + shutdown(true, storedError); + } + return null; + }); + isOrBecomesClosed(source, reader._closedPromise, () => { + if (!preventClose) { + shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer)); + } else { + shutdown(); + } + return null; + }); + if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === "closed") { + const destClosed = new TypeError("the destination writable stream closed before all data could be piped to it"); + if (!preventCancel) { + shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed); + } else { + shutdown(true, destClosed); + } 
+ } + setPromiseIsHandledToTrue(pipeLoop()); + function waitForWritesToFinish() { + const oldCurrentWrite = currentWrite; + return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : void 0); + } + function isOrBecomesErrored(stream, promise, action) { + if (stream._state === "errored") { + action(stream._storedError); + } else { + uponRejection(promise, action); + } + } + function isOrBecomesClosed(stream, promise, action) { + if (stream._state === "closed") { + action(); + } else { + uponFulfillment(promise, action); + } + } + function shutdownWithAction(action, originalIsError, originalError) { + if (shuttingDown) { + return; + } + shuttingDown = true; + if (dest._state === "writable" && !WritableStreamCloseQueuedOrInFlight(dest)) { + uponFulfillment(waitForWritesToFinish(), doTheRest); + } else { + doTheRest(); + } + function doTheRest() { + uponPromise(action(), () => finalize(originalIsError, originalError), (newError) => finalize(true, newError)); + return null; + } + } + function shutdown(isError, error) { + if (shuttingDown) { + return; + } + shuttingDown = true; + if (dest._state === "writable" && !WritableStreamCloseQueuedOrInFlight(dest)) { + uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error)); + } else { + finalize(isError, error); + } + } + function finalize(isError, error) { + WritableStreamDefaultWriterRelease(writer); + ReadableStreamReaderGenericRelease(reader); + if (signal !== void 0) { + signal.removeEventListener("abort", abortAlgorithm); + } + if (isError) { + reject(error); + } else { + resolve(void 0); + } + return null; + } + }); } - static [Symbol.hasInstance](object) { - return !!object && object instanceof fetch_blob_default && /^(File)$/.test(object[Symbol.toStringTag]); + class ReadableStreamDefaultController { + constructor() { + throw new TypeError("Illegal constructor"); + } + /** + * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is + * over-full. An underlying source ought to use this information to determine when and how to apply backpressure. + */ + get desiredSize() { + if (!IsReadableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$1("desiredSize"); + } + return ReadableStreamDefaultControllerGetDesiredSize(this); + } + /** + * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from + * the stream, but once those are read, the stream will become closed. + */ + close() { + if (!IsReadableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$1("close"); + } + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError("The stream is not in a state that permits close"); + } + ReadableStreamDefaultControllerClose(this); + } + enqueue(chunk = void 0) { + if (!IsReadableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$1("enqueue"); + } + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) { + throw new TypeError("The stream is not in a state that permits enqueue"); + } + return ReadableStreamDefaultControllerEnqueue(this, chunk); + } + /** + * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`. 
+ */ + error(e2 = void 0) { + if (!IsReadableStreamDefaultController(this)) { + throw defaultControllerBrandCheckException$1("error"); + } + ReadableStreamDefaultControllerError(this, e2); + } + /** @internal */ + [CancelSteps](reason) { + ResetQueue(this); + const result = this._cancelAlgorithm(reason); + ReadableStreamDefaultControllerClearAlgorithms(this); + return result; + } + /** @internal */ + [PullSteps](readRequest) { + const stream = this._controlledReadableStream; + if (this._queue.length > 0) { + const chunk = DequeueValue(this); + if (this._closeRequested && this._queue.length === 0) { + ReadableStreamDefaultControllerClearAlgorithms(this); + ReadableStreamClose(stream); + } else { + ReadableStreamDefaultControllerCallPullIfNeeded(this); + } + readRequest._chunkSteps(chunk); + } else { + ReadableStreamAddReadRequest(stream, readRequest); + ReadableStreamDefaultControllerCallPullIfNeeded(this); + } + } + /** @internal */ + [ReleaseSteps]() { + } } - }; - File2 = _File; - file_default = File2; - } -}); - -// node_modules/formdata-polyfill/esm.min.js -function formDataToBlob(F2, B = fetch_blob_default) { - var b = `${r()}${r()}`.replace(/\./g, "").slice(-28).padStart(32, "-"), c = [], p = `--${b}\r -Content-Disposition: form-data; name="`; - F2.forEach((v, n) => typeof v == "string" ? c.push(p + e(n) + `"\r -\r -${v.replace(/\r(?!\n)|(? (a += "", /^(Blob|File)$/.test(b && b[t]) ? [(c = c !== void 0 ? c + "" : b[t] == "File" ? b.name : "blob", a), b.name !== c || b[t] == "blob" ? new file_default([b], c, b) : b] : [a, b + ""]); - e = (c, f3) => (f3 ? c : c.replace(/\r?\n|\r/g, "\r\n")).replace(/\n/g, "%0A").replace(/\r/g, "%0D").replace(/"/g, "%22"); - x = (n, a, e2) => { - if (a.length < e2) { - throw new TypeError(`Failed to execute '${n}' on 'FormData': ${e2} arguments required, but only ${a.length} present.`); + Object.defineProperties(ReadableStreamDefaultController.prototype, { + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + desiredSize: { enumerable: true } + }); + setFunctionName(ReadableStreamDefaultController.prototype.close, "close"); + setFunctionName(ReadableStreamDefaultController.prototype.enqueue, "enqueue"); + setFunctionName(ReadableStreamDefaultController.prototype.error, "error"); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(ReadableStreamDefaultController.prototype, Symbol.toStringTag, { + value: "ReadableStreamDefaultController", + configurable: true + }); } - }; - FormData = class FormData2 { - #d = []; - constructor(...a) { - if (a.length) - throw new TypeError(`Failed to construct 'FormData': parameter 1 is not of type 'HTMLFormElement'.`); + function IsReadableStreamDefaultController(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_controlledReadableStream")) { + return false; + } + return x2 instanceof ReadableStreamDefaultController; } - get [t]() { - return "FormData"; + function ReadableStreamDefaultControllerCallPullIfNeeded(controller) { + const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller); + if (!shouldPull) { + return; + } + if (controller._pulling) { + controller._pullAgain = true; + return; + } + controller._pulling = true; + const pullPromise = controller._pullAlgorithm(); + uponPromise(pullPromise, () => { + controller._pulling = false; + if (controller._pullAgain) { + controller._pullAgain = false; + ReadableStreamDefaultControllerCallPullIfNeeded(controller); + } + return null; + }, (e2) 
=> { + ReadableStreamDefaultControllerError(controller, e2); + return null; + }); } - [i]() { - return this.entries(); + function ReadableStreamDefaultControllerShouldCallPull(controller) { + const stream = controller._controlledReadableStream; + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return false; + } + if (!controller._started) { + return false; + } + if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { + return true; + } + const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller); + if (desiredSize > 0) { + return true; + } + return false; } - static [h](o) { - return o && typeof o === "object" && o[t] === "FormData" && !m.some((m2) => typeof o[m2] != "function"); + function ReadableStreamDefaultControllerClearAlgorithms(controller) { + controller._pullAlgorithm = void 0; + controller._cancelAlgorithm = void 0; + controller._strategySizeAlgorithm = void 0; } - append(...a) { - x("append", arguments, 2); - this.#d.push(f(...a)); + function ReadableStreamDefaultControllerClose(controller) { + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return; + } + const stream = controller._controlledReadableStream; + controller._closeRequested = true; + if (controller._queue.length === 0) { + ReadableStreamDefaultControllerClearAlgorithms(controller); + ReadableStreamClose(stream); + } } - delete(a) { - x("delete", arguments, 1); - a += ""; - this.#d = this.#d.filter(([b]) => b !== a); + function ReadableStreamDefaultControllerEnqueue(controller, chunk) { + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) { + return; + } + const stream = controller._controlledReadableStream; + if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) { + ReadableStreamFulfillReadRequest(stream, chunk, false); + } else { + let chunkSize; + try { + chunkSize = controller._strategySizeAlgorithm(chunk); + } catch (chunkSizeE) { + ReadableStreamDefaultControllerError(controller, chunkSizeE); + throw chunkSizeE; + } + try { + EnqueueValueWithSize(controller, chunk, chunkSize); + } catch (enqueueE) { + ReadableStreamDefaultControllerError(controller, enqueueE); + throw enqueueE; + } + } + ReadableStreamDefaultControllerCallPullIfNeeded(controller); } - get(a) { - x("get", arguments, 1); - a += ""; - for (var b = this.#d, l = b.length, c = 0; c < l; c++) - if (b[c][0] === a) - return b[c][1]; - return null; + function ReadableStreamDefaultControllerError(controller, e2) { + const stream = controller._controlledReadableStream; + if (stream._state !== "readable") { + return; + } + ResetQueue(controller); + ReadableStreamDefaultControllerClearAlgorithms(controller); + ReadableStreamError(stream, e2); } - getAll(a, b) { - x("getAll", arguments, 1); - b = []; - a += ""; - this.#d.forEach((c) => c[0] === a && b.push(c[1])); - return b; + function ReadableStreamDefaultControllerGetDesiredSize(controller) { + const state = controller._controlledReadableStream._state; + if (state === "errored") { + return null; + } + if (state === "closed") { + return 0; + } + return controller._strategyHWM - controller._queueTotalSize; } - has(a) { - x("has", arguments, 1); - a += ""; - return this.#d.some((b) => b[0] === a); + function ReadableStreamDefaultControllerHasBackpressure(controller) { + if (ReadableStreamDefaultControllerShouldCallPull(controller)) { + return false; + } + return true; } - forEach(a, b) { - x("forEach", arguments, 1); - for (var [c, d] of this) - a.call(b, d, c, this); + 
function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const state = controller._controlledReadableStream._state; + if (!controller._closeRequested && state === "readable") { + return true; + } + return false; } - set(...a) { - x("set", arguments, 2); - var b = [], c = true; - a = f(...a); - this.#d.forEach((d) => { - d[0] === a[0] ? c && (c = !b.push(a)) : b.push(d); + function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) { + controller._controlledReadableStream = stream; + controller._queue = void 0; + controller._queueTotalSize = void 0; + ResetQueue(controller); + controller._started = false; + controller._closeRequested = false; + controller._pullAgain = false; + controller._pulling = false; + controller._strategySizeAlgorithm = sizeAlgorithm; + controller._strategyHWM = highWaterMark; + controller._pullAlgorithm = pullAlgorithm; + controller._cancelAlgorithm = cancelAlgorithm; + stream._readableStreamController = controller; + const startResult = startAlgorithm(); + uponPromise(promiseResolvedWith(startResult), () => { + controller._started = true; + ReadableStreamDefaultControllerCallPullIfNeeded(controller); + return null; + }, (r2) => { + ReadableStreamDefaultControllerError(controller, r2); + return null; }); - c && b.push(a); - this.#d = b; - } - *entries() { - yield* this.#d; } - *keys() { - for (var [a] of this) - yield a; + function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) { + const controller = Object.create(ReadableStreamDefaultController.prototype); + let startAlgorithm; + let pullAlgorithm; + let cancelAlgorithm; + if (underlyingSource.start !== void 0) { + startAlgorithm = () => underlyingSource.start(controller); + } else { + startAlgorithm = () => void 0; + } + if (underlyingSource.pull !== void 0) { + pullAlgorithm = () => underlyingSource.pull(controller); + } else { + pullAlgorithm = () => promiseResolvedWith(void 0); + } + if (underlyingSource.cancel !== void 0) { + cancelAlgorithm = (reason) => underlyingSource.cancel(reason); + } else { + cancelAlgorithm = () => promiseResolvedWith(void 0); + } + SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); } - *values() { - for (var [, a] of this) - yield a; + function defaultControllerBrandCheckException$1(name) { + return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`); } - }; - } -}); - -// node_modules/node-domexception/index.js -var require_node_domexception = __commonJS({ - "node_modules/node-domexception/index.js"(exports2, module2) { - "use strict"; - if (!globalThis.DOMException) { - try { - const { MessageChannel } = require("worker_threads"), port = new MessageChannel().port1, ab = new ArrayBuffer(); - port.postMessage(ab, [ab, ab]); - } catch (err) { - err.constructor.name === "DOMException" && (globalThis.DOMException = err.constructor); + function ReadableStreamTee(stream, cloneForBranch2) { + if (IsReadableByteStreamController(stream._readableStreamController)) { + return ReadableByteStreamTee(stream); + } + return ReadableStreamDefaultTee(stream); } - } - module2.exports = globalThis.DOMException; - } -}); - -// node_modules/fetch-blob/from.js -var import_node_fs, import_node_path, import_node_domexception, stat; -var init_from = __esm({ - "node_modules/fetch-blob/from.js"() 
{ - "use strict"; - import_node_fs = require("fs"); - import_node_path = require("path"); - import_node_domexception = __toESM(require_node_domexception(), 1); - init_file(); - init_fetch_blob(); - ({ stat } = import_node_fs.promises); - } -}); - -// node_modules/node-fetch/src/utils/multipart-parser.js -var multipart_parser_exports = {}; -__export(multipart_parser_exports, { - toFormData: () => toFormData -}); -function _fileName(headerValue) { - const m2 = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i); - if (!m2) { - return; - } - const match2 = m2[2] || m2[3] || ""; - let filename = match2.slice(match2.lastIndexOf("\\") + 1); - filename = filename.replace(/%22/g, '"'); - filename = filename.replace(/&#(\d{4});/g, (m3, code) => { - return String.fromCharCode(code); - }); - return filename; -} -async function toFormData(Body2, ct) { - if (!/multipart/i.test(ct)) { - throw new TypeError("Failed to fetch"); - } - const m2 = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i); - if (!m2) { - throw new TypeError("no or bad content-type header, no multipart boundary"); - } - const parser3 = new MultipartParser(m2[1] || m2[2]); - let headerField; - let headerValue; - let entryValue; - let entryName; - let contentType; - let filename; - const entryChunks = []; - const formData = new FormData(); - const onPartData = (ui8a) => { - entryValue += decoder.decode(ui8a, { stream: true }); - }; - const appendToFile = (ui8a) => { - entryChunks.push(ui8a); - }; - const appendFileToFormData = () => { - const file = new file_default(entryChunks, filename, { type: contentType }); - formData.append(entryName, file); - }; - const appendEntryToFormData = () => { - formData.append(entryName, entryValue); - }; - const decoder = new TextDecoder("utf-8"); - decoder.decode(); - parser3.onPartBegin = function() { - parser3.onPartData = onPartData; - parser3.onPartEnd = appendEntryToFormData; - headerField = ""; - headerValue = ""; - entryValue = ""; - entryName = ""; - contentType = ""; - filename = null; - entryChunks.length = 0; - }; - parser3.onHeaderField = function(ui8a) { - headerField += decoder.decode(ui8a, { stream: true }); - }; - parser3.onHeaderValue = function(ui8a) { - headerValue += decoder.decode(ui8a, { stream: true }); - }; - parser3.onHeaderEnd = function() { - headerValue += decoder.decode(); - headerField = headerField.toLowerCase(); - if (headerField === "content-disposition") { - const m3 = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i); - if (m3) { - entryName = m3[2] || m3[3] || ""; - } - filename = _fileName(headerValue); - if (filename) { - parser3.onPartData = appendToFile; - parser3.onPartEnd = appendFileToFormData; - } - } else if (headerField === "content-type") { - contentType = headerValue; - } - headerValue = ""; - headerField = ""; - }; - for await (const chunk of Body2) { - parser3.write(chunk); - } - parser3.end(); - return formData; -} -var s, S, f2, F, LF, CR, SPACE, HYPHEN, COLON, A, Z, lower, noop2, MultipartParser; -var init_multipart_parser = __esm({ - "node_modules/node-fetch/src/utils/multipart-parser.js"() { - "use strict"; - init_from(); - init_esm_min(); - s = 0; - S = { - START_BOUNDARY: s++, - HEADER_FIELD_START: s++, - HEADER_FIELD: s++, - HEADER_VALUE_START: s++, - HEADER_VALUE: s++, - HEADER_VALUE_ALMOST_DONE: s++, - HEADERS_ALMOST_DONE: s++, - PART_DATA_START: s++, - PART_DATA: s++, - END: s++ - }; - f2 = 1; - F = { - PART_BOUNDARY: f2, - LAST_BOUNDARY: f2 *= 2 - }; - LF = 10; - CR = 13; - SPACE = 32; - HYPHEN = 
45; - COLON = 58; - A = 97; - Z = 122; - lower = (c) => c | 32; - noop2 = () => { - }; - MultipartParser = class { - /** - * @param {string} boundary - */ - constructor(boundary) { - this.index = 0; - this.flags = 0; - this.onHeaderEnd = noop2; - this.onHeaderField = noop2; - this.onHeadersEnd = noop2; - this.onHeaderValue = noop2; - this.onPartBegin = noop2; - this.onPartData = noop2; - this.onPartEnd = noop2; - this.boundaryChars = {}; - boundary = "\r\n--" + boundary; - const ui8a = new Uint8Array(boundary.length); - for (let i2 = 0; i2 < boundary.length; i2++) { - ui8a[i2] = boundary.charCodeAt(i2); - this.boundaryChars[ui8a[i2]] = true; - } - this.boundary = ui8a; - this.lookbehind = new Uint8Array(this.boundary.length + 8); - this.state = S.START_BOUNDARY; - } - /** - * @param {Uint8Array} data - */ - write(data) { - let i2 = 0; - const length_ = data.length; - let previousIndex = this.index; - let { lookbehind, boundary, boundaryChars, index, state, flags } = this; - const boundaryLength = this.boundary.length; - const boundaryEnd = boundaryLength - 1; - const bufferLength = data.length; - let c; - let cl; - const mark = (name) => { - this[name + "Mark"] = i2; - }; - const clear = (name) => { - delete this[name + "Mark"]; - }; - const callback = (callbackSymbol, start, end, ui8a) => { - if (start === void 0 || start !== end) { - this[callbackSymbol](ui8a && ui8a.subarray(start, end)); - } - }; - const dataCallback = (name, clear2) => { - const markSymbol = name + "Mark"; - if (!(markSymbol in this)) { - return; - } - if (clear2) { - callback(name, this[markSymbol], i2, data); - delete this[markSymbol]; - } else { - callback(name, this[markSymbol], data.length, data); - this[markSymbol] = 0; + function ReadableStreamDefaultTee(stream, cloneForBranch2) { + const reader = AcquireReadableStreamDefaultReader(stream); + let reading = false; + let readAgain = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + let resolveCancelPromise; + const cancelPromise = newPromise((resolve) => { + resolveCancelPromise = resolve; + }); + function pullAlgorithm() { + if (reading) { + readAgain = true; + return promiseResolvedWith(void 0); } - }; - for (i2 = 0; i2 < length_; i2++) { - c = data[i2]; - switch (state) { - case S.START_BOUNDARY: - if (index === boundary.length - 2) { - if (c === HYPHEN) { - flags |= F.LAST_BOUNDARY; - } else if (c !== CR) { - return; + reading = true; + const readRequest = { + _chunkSteps: (chunk) => { + _queueMicrotask(() => { + readAgain = false; + const chunk1 = chunk; + const chunk2 = chunk; + if (!canceled1) { + ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1); } - index++; - break; - } else if (index - 1 === boundary.length - 2) { - if (flags & F.LAST_BOUNDARY && c === HYPHEN) { - state = S.END; - flags = 0; - } else if (!(flags & F.LAST_BOUNDARY) && c === LF) { - index = 0; - callback("onPartBegin"); - state = S.HEADER_FIELD_START; - } else { - return; + if (!canceled2) { + ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2); } - break; - } - if (c !== boundary[index + 2]) { - index = -2; - } - if (c === boundary[index + 2]) { - index++; + reading = false; + if (readAgain) { + pullAlgorithm(); + } + }); + }, + _closeSteps: () => { + reading = false; + if (!canceled1) { + ReadableStreamDefaultControllerClose(branch1._readableStreamController); } - break; - case S.HEADER_FIELD_START: - state = S.HEADER_FIELD; - mark("onHeaderField"); - index 
= 0; - case S.HEADER_FIELD: - if (c === CR) { - clear("onHeaderField"); - state = S.HEADERS_ALMOST_DONE; - break; + if (!canceled2) { + ReadableStreamDefaultControllerClose(branch2._readableStreamController); } - index++; - if (c === HYPHEN) { - break; + if (!canceled1 || !canceled2) { + resolveCancelPromise(void 0); } - if (c === COLON) { - if (index === 1) { - return; + }, + _errorSteps: () => { + reading = false; + } + }; + ReadableStreamDefaultReaderRead(reader, readRequest); + return promiseResolvedWith(void 0); + } + function cancel1Algorithm(reason) { + canceled1 = true; + reason1 = reason; + if (canceled2) { + const compositeReason = CreateArrayFromList([reason1, reason2]); + const cancelResult = ReadableStreamCancel(stream, compositeReason); + resolveCancelPromise(cancelResult); + } + return cancelPromise; + } + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = CreateArrayFromList([reason1, reason2]); + const cancelResult = ReadableStreamCancel(stream, compositeReason); + resolveCancelPromise(cancelResult); + } + return cancelPromise; + } + function startAlgorithm() { + } + branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm); + branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm); + uponRejection(reader._closedPromise, (r2) => { + ReadableStreamDefaultControllerError(branch1._readableStreamController, r2); + ReadableStreamDefaultControllerError(branch2._readableStreamController, r2); + if (!canceled1 || !canceled2) { + resolveCancelPromise(void 0); + } + return null; + }); + return [branch1, branch2]; + } + function ReadableByteStreamTee(stream) { + let reader = AcquireReadableStreamDefaultReader(stream); + let reading = false; + let readAgainForBranch1 = false; + let readAgainForBranch2 = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + let resolveCancelPromise; + const cancelPromise = newPromise((resolve) => { + resolveCancelPromise = resolve; + }); + function forwardReaderError(thisReader) { + uponRejection(thisReader._closedPromise, (r2) => { + if (thisReader !== reader) { + return null; + } + ReadableByteStreamControllerError(branch1._readableStreamController, r2); + ReadableByteStreamControllerError(branch2._readableStreamController, r2); + if (!canceled1 || !canceled2) { + resolveCancelPromise(void 0); + } + return null; + }); + } + function pullWithDefaultReader() { + if (IsReadableStreamBYOBReader(reader)) { + ReadableStreamReaderGenericRelease(reader); + reader = AcquireReadableStreamDefaultReader(stream); + forwardReaderError(reader); + } + const readRequest = { + _chunkSteps: (chunk) => { + _queueMicrotask(() => { + readAgainForBranch1 = false; + readAgainForBranch2 = false; + const chunk1 = chunk; + let chunk2 = chunk; + if (!canceled1 && !canceled2) { + try { + chunk2 = CloneAsUint8Array(chunk); + } catch (cloneE) { + ReadableByteStreamControllerError(branch1._readableStreamController, cloneE); + ReadableByteStreamControllerError(branch2._readableStreamController, cloneE); + resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); + return; + } } - dataCallback("onHeaderField", true); - state = S.HEADER_VALUE_START; - break; - } - cl = lower(c); - if (cl < A || cl > Z) { - return; - } - break; - case S.HEADER_VALUE_START: - if (c === SPACE) { - break; + if (!canceled1) { + ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1); + } + if 
(!canceled2) { + ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2); + } + reading = false; + if (readAgainForBranch1) { + pull1Algorithm(); + } else if (readAgainForBranch2) { + pull2Algorithm(); + } + }); + }, + _closeSteps: () => { + reading = false; + if (!canceled1) { + ReadableByteStreamControllerClose(branch1._readableStreamController); } - mark("onHeaderValue"); - state = S.HEADER_VALUE; - case S.HEADER_VALUE: - if (c === CR) { - dataCallback("onHeaderValue", true); - callback("onHeaderEnd"); - state = S.HEADER_VALUE_ALMOST_DONE; + if (!canceled2) { + ReadableByteStreamControllerClose(branch2._readableStreamController); } - break; - case S.HEADER_VALUE_ALMOST_DONE: - if (c !== LF) { - return; + if (branch1._readableStreamController._pendingPullIntos.length > 0) { + ReadableByteStreamControllerRespond(branch1._readableStreamController, 0); } - state = S.HEADER_FIELD_START; - break; - case S.HEADERS_ALMOST_DONE: - if (c !== LF) { - return; + if (branch2._readableStreamController._pendingPullIntos.length > 0) { + ReadableByteStreamControllerRespond(branch2._readableStreamController, 0); } - callback("onHeadersEnd"); - state = S.PART_DATA_START; - break; - case S.PART_DATA_START: - state = S.PART_DATA; - mark("onPartData"); - case S.PART_DATA: - previousIndex = index; - if (index === 0) { - i2 += boundaryEnd; - while (i2 < bufferLength && !(data[i2] in boundaryChars)) { - i2 += boundaryLength; - } - i2 -= boundaryEnd; - c = data[i2]; + if (!canceled1 || !canceled2) { + resolveCancelPromise(void 0); } - if (index < boundary.length) { - if (boundary[index] === c) { - if (index === 0) { - dataCallback("onPartData", true); - } - index++; - } else { - index = 0; - } - } else if (index === boundary.length) { - index++; - if (c === CR) { - flags |= F.PART_BOUNDARY; - } else if (c === HYPHEN) { - flags |= F.LAST_BOUNDARY; - } else { - index = 0; - } - } else if (index - 1 === boundary.length) { - if (flags & F.PART_BOUNDARY) { - index = 0; - if (c === LF) { - flags &= ~F.PART_BOUNDARY; - callback("onPartEnd"); - callback("onPartBegin"); - state = S.HEADER_FIELD_START; - break; + }, + _errorSteps: () => { + reading = false; + } + }; + ReadableStreamDefaultReaderRead(reader, readRequest); + } + function pullWithBYOBReader(view, forBranch2) { + if (IsReadableStreamDefaultReader(reader)) { + ReadableStreamReaderGenericRelease(reader); + reader = AcquireReadableStreamBYOBReader(stream); + forwardReaderError(reader); + } + const byobBranch = forBranch2 ? branch2 : branch1; + const otherBranch = forBranch2 ? branch1 : branch2; + const readIntoRequest = { + _chunkSteps: (chunk) => { + _queueMicrotask(() => { + readAgainForBranch1 = false; + readAgainForBranch2 = false; + const byobCanceled = forBranch2 ? canceled2 : canceled1; + const otherCanceled = forBranch2 ? 
canceled1 : canceled2; + if (!otherCanceled) { + let clonedChunk; + try { + clonedChunk = CloneAsUint8Array(chunk); + } catch (cloneE) { + ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE); + ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE); + resolveCancelPromise(ReadableStreamCancel(stream, cloneE)); + return; } - } else if (flags & F.LAST_BOUNDARY) { - if (c === HYPHEN) { - callback("onPartEnd"); - state = S.END; - flags = 0; - } else { - index = 0; + if (!byobCanceled) { + ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); } - } else { - index = 0; + ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk); + } else if (!byobCanceled) { + ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); + } + reading = false; + if (readAgainForBranch1) { + pull1Algorithm(); + } else if (readAgainForBranch2) { + pull2Algorithm(); } + }); + }, + _closeSteps: (chunk) => { + reading = false; + const byobCanceled = forBranch2 ? canceled2 : canceled1; + const otherCanceled = forBranch2 ? canceled1 : canceled2; + if (!byobCanceled) { + ReadableByteStreamControllerClose(byobBranch._readableStreamController); } - if (index > 0) { - lookbehind[index - 1] = c; - } else if (previousIndex > 0) { - const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength); - callback("onPartData", 0, previousIndex, _lookbehind); - previousIndex = 0; - mark("onPartData"); - i2--; + if (!otherCanceled) { + ReadableByteStreamControllerClose(otherBranch._readableStreamController); } - break; - case S.END: - break; - default: - throw new Error(`Unexpected state entered: ${state}`); + if (chunk !== void 0) { + if (!byobCanceled) { + ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk); + } + if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) { + ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0); + } + } + if (!byobCanceled || !otherCanceled) { + resolveCancelPromise(void 0); + } + }, + _errorSteps: () => { + reading = false; + } + }; + ReadableStreamBYOBReaderRead(reader, view, 1, readIntoRequest); + } + function pull1Algorithm() { + if (reading) { + readAgainForBranch1 = true; + return promiseResolvedWith(void 0); + } + reading = true; + const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController); + if (byobRequest === null) { + pullWithDefaultReader(); + } else { + pullWithBYOBReader(byobRequest._view, false); } + return promiseResolvedWith(void 0); } - dataCallback("onHeaderField"); - dataCallback("onHeaderValue"); - dataCallback("onPartData"); - this.index = index; - this.state = state; - this.flags = flags; - } - end() { - if (this.state === S.HEADER_FIELD_START && this.index === 0 || this.state === S.PART_DATA && this.index === this.boundary.length) { - this.onPartEnd(); - } else if (this.state !== S.END) { - throw new Error("MultipartParser.end(): stream ended unexpectedly"); + function pull2Algorithm() { + if (reading) { + readAgainForBranch2 = true; + return promiseResolvedWith(void 0); + } + reading = true; + const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController); + if (byobRequest === null) { + pullWithDefaultReader(); + } else { + pullWithBYOBReader(byobRequest._view, true); + } + return promiseResolvedWith(void 0); } - } - }; 
- } -}); - -// src/main.ts -var import_github = __toESM(require_github(), 1); - -// node_modules/chalk/source/vendor/ansi-styles/index.js -var ANSI_BACKGROUND_OFFSET = 10; -var wrapAnsi16 = (offset = 0) => (code) => `\x1B[${code + offset}m`; -var wrapAnsi256 = (offset = 0) => (code) => `\x1B[${38 + offset};5;${code}m`; -var wrapAnsi16m = (offset = 0) => (red, green, blue) => `\x1B[${38 + offset};2;${red};${green};${blue}m`; -var styles = { - modifier: { - reset: [0, 0], - // 21 isn't widely supported and 22 does the same thing - bold: [1, 22], - dim: [2, 22], - italic: [3, 23], - underline: [4, 24], - overline: [53, 55], - inverse: [7, 27], - hidden: [8, 28], - strikethrough: [9, 29] - }, - color: { - black: [30, 39], - red: [31, 39], - green: [32, 39], - yellow: [33, 39], - blue: [34, 39], - magenta: [35, 39], - cyan: [36, 39], - white: [37, 39], - // Bright color - blackBright: [90, 39], - gray: [90, 39], - // Alias of `blackBright` - grey: [90, 39], - // Alias of `blackBright` - redBright: [91, 39], - greenBright: [92, 39], - yellowBright: [93, 39], - blueBright: [94, 39], - magentaBright: [95, 39], - cyanBright: [96, 39], - whiteBright: [97, 39] - }, - bgColor: { - bgBlack: [40, 49], - bgRed: [41, 49], - bgGreen: [42, 49], - bgYellow: [43, 49], - bgBlue: [44, 49], - bgMagenta: [45, 49], - bgCyan: [46, 49], - bgWhite: [47, 49], - // Bright color - bgBlackBright: [100, 49], - bgGray: [100, 49], - // Alias of `bgBlackBright` - bgGrey: [100, 49], - // Alias of `bgBlackBright` - bgRedBright: [101, 49], - bgGreenBright: [102, 49], - bgYellowBright: [103, 49], - bgBlueBright: [104, 49], - bgMagentaBright: [105, 49], - bgCyanBright: [106, 49], - bgWhiteBright: [107, 49] - } -}; -var modifierNames = Object.keys(styles.modifier); -var foregroundColorNames = Object.keys(styles.color); -var backgroundColorNames = Object.keys(styles.bgColor); -var colorNames = [...foregroundColorNames, ...backgroundColorNames]; -function assembleStyles() { - const codes = /* @__PURE__ */ new Map(); - for (const [groupName, group] of Object.entries(styles)) { - for (const [styleName, style] of Object.entries(group)) { - styles[styleName] = { - open: `\x1B[${style[0]}m`, - close: `\x1B[${style[1]}m` - }; - group[styleName] = styles[styleName]; - codes.set(style[0], style[1]); - } - Object.defineProperty(styles, groupName, { - value: group, - enumerable: false - }); - } - Object.defineProperty(styles, "codes", { - value: codes, - enumerable: false - }); - styles.color.close = "\x1B[39m"; - styles.bgColor.close = "\x1B[49m"; - styles.color.ansi = wrapAnsi16(); - styles.color.ansi256 = wrapAnsi256(); - styles.color.ansi16m = wrapAnsi16m(); - styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET); - styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET); - styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET); - Object.defineProperties(styles, { - rgbToAnsi256: { - value(red, green, blue) { - if (red === green && green === blue) { - if (red < 8) { - return 16; + function cancel1Algorithm(reason) { + canceled1 = true; + reason1 = reason; + if (canceled2) { + const compositeReason = CreateArrayFromList([reason1, reason2]); + const cancelResult = ReadableStreamCancel(stream, compositeReason); + resolveCancelPromise(cancelResult); } - if (red > 248) { - return 231; + return cancelPromise; + } + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = CreateArrayFromList([reason1, reason2]); + const cancelResult = ReadableStreamCancel(stream, 
compositeReason); + resolveCancelPromise(cancelResult); } - return Math.round((red - 8) / 247 * 24) + 232; + return cancelPromise; } - return 16 + 36 * Math.round(red / 255 * 5) + 6 * Math.round(green / 255 * 5) + Math.round(blue / 255 * 5); - }, - enumerable: false - }, - hexToRgb: { - value(hex) { - const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16)); - if (!matches) { - return [0, 0, 0]; + function startAlgorithm() { + return; } - let [colorString] = matches; - if (colorString.length === 3) { - colorString = [...colorString].map((character) => character + character).join(""); + branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm); + branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm); + forwardReaderError(reader); + return [branch1, branch2]; + } + function isReadableStreamLike(stream) { + return typeIsObject(stream) && typeof stream.getReader !== "undefined"; + } + function ReadableStreamFrom(source) { + if (isReadableStreamLike(source)) { + return ReadableStreamFromDefaultReader(source.getReader()); } - const integer = Number.parseInt(colorString, 16); - return [ - /* eslint-disable no-bitwise */ - integer >> 16 & 255, - integer >> 8 & 255, - integer & 255 - /* eslint-enable no-bitwise */ - ]; - }, - enumerable: false - }, - hexToAnsi256: { - value: (hex) => styles.rgbToAnsi256(...styles.hexToRgb(hex)), - enumerable: false - }, - ansi256ToAnsi: { - value(code) { - if (code < 8) { - return 30 + code; + return ReadableStreamFromIterable(source); + } + function ReadableStreamFromIterable(asyncIterable) { + let stream; + const iteratorRecord = GetIterator(asyncIterable, "async"); + const startAlgorithm = noop4; + function pullAlgorithm() { + let nextResult; + try { + nextResult = IteratorNext(iteratorRecord); + } catch (e2) { + return promiseRejectedWith(e2); + } + const nextPromise = promiseResolvedWith(nextResult); + return transformPromiseWith(nextPromise, (iterResult) => { + if (!typeIsObject(iterResult)) { + throw new TypeError("The promise returned by the iterator.next() method must fulfill with an object"); + } + const done = IteratorComplete(iterResult); + if (done) { + ReadableStreamDefaultControllerClose(stream._readableStreamController); + } else { + const value = IteratorValue(iterResult); + ReadableStreamDefaultControllerEnqueue(stream._readableStreamController, value); + } + }); } - if (code < 16) { - return 90 + (code - 8); + function cancelAlgorithm(reason) { + const iterator3 = iteratorRecord.iterator; + let returnMethod; + try { + returnMethod = GetMethod(iterator3, "return"); + } catch (e2) { + return promiseRejectedWith(e2); + } + if (returnMethod === void 0) { + return promiseResolvedWith(void 0); + } + let returnResult; + try { + returnResult = reflectCall(returnMethod, iterator3, [reason]); + } catch (e2) { + return promiseRejectedWith(e2); + } + const returnPromise = promiseResolvedWith(returnResult); + return transformPromiseWith(returnPromise, (iterResult) => { + if (!typeIsObject(iterResult)) { + throw new TypeError("The promise returned by the iterator.return() method must fulfill with an object"); + } + return void 0; + }); } - let red; - let green; - let blue; - if (code >= 232) { - red = ((code - 232) * 10 + 8) / 255; - green = red; - blue = red; - } else { - code -= 16; - const remainder = code % 36; - red = Math.floor(code / 36) / 5; - green = Math.floor(remainder / 6) / 5; - blue = remainder % 6 / 5; + stream = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, 
0); + return stream; + } + function ReadableStreamFromDefaultReader(reader) { + let stream; + const startAlgorithm = noop4; + function pullAlgorithm() { + let readPromise; + try { + readPromise = reader.read(); + } catch (e2) { + return promiseRejectedWith(e2); + } + return transformPromiseWith(readPromise, (readResult) => { + if (!typeIsObject(readResult)) { + throw new TypeError("The promise returned by the reader.read() method must fulfill with an object"); + } + if (readResult.done) { + ReadableStreamDefaultControllerClose(stream._readableStreamController); + } else { + const value = readResult.value; + ReadableStreamDefaultControllerEnqueue(stream._readableStreamController, value); + } + }); } - const value = Math.max(red, green, blue) * 2; - if (value === 0) { - return 30; + function cancelAlgorithm(reason) { + try { + return promiseResolvedWith(reader.cancel(reason)); + } catch (e2) { + return promiseRejectedWith(e2); + } } - let result = 30 + (Math.round(blue) << 2 | Math.round(green) << 1 | Math.round(red)); - if (value === 2) { - result += 60; + stream = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, 0); + return stream; + } + function convertUnderlyingDefaultOrByteSource(source, context2) { + assertDictionary(source, context2); + const original = source; + const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize; + const cancel = original === null || original === void 0 ? void 0 : original.cancel; + const pull = original === null || original === void 0 ? void 0 : original.pull; + const start = original === null || original === void 0 ? void 0 : original.start; + const type = original === null || original === void 0 ? void 0 : original.type; + return { + autoAllocateChunkSize: autoAllocateChunkSize === void 0 ? void 0 : convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context2} has member 'autoAllocateChunkSize' that`), + cancel: cancel === void 0 ? void 0 : convertUnderlyingSourceCancelCallback(cancel, original, `${context2} has member 'cancel' that`), + pull: pull === void 0 ? void 0 : convertUnderlyingSourcePullCallback(pull, original, `${context2} has member 'pull' that`), + start: start === void 0 ? void 0 : convertUnderlyingSourceStartCallback(start, original, `${context2} has member 'start' that`), + type: type === void 0 ? 
void 0 : convertReadableStreamType(type, `${context2} has member 'type' that`) + }; + } + function convertUnderlyingSourceCancelCallback(fn, original, context2) { + assertFunction(fn, context2); + return (reason) => promiseCall(fn, original, [reason]); + } + function convertUnderlyingSourcePullCallback(fn, original, context2) { + assertFunction(fn, context2); + return (controller) => promiseCall(fn, original, [controller]); + } + function convertUnderlyingSourceStartCallback(fn, original, context2) { + assertFunction(fn, context2); + return (controller) => reflectCall(fn, original, [controller]); + } + function convertReadableStreamType(type, context2) { + type = `${type}`; + if (type !== "bytes") { + throw new TypeError(`${context2} '${type}' is not a valid enumeration value for ReadableStreamType`); } - return result; - }, - enumerable: false - }, - rgbToAnsi: { - value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)), - enumerable: false - }, - hexToAnsi: { - value: (hex) => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)), - enumerable: false - } - }); - return styles; -} -var ansiStyles = assembleStyles(); -var ansi_styles_default = ansiStyles; - -// node_modules/chalk/source/vendor/supports-color/index.js -var import_node_process = __toESM(require("process"), 1); -var import_node_os = __toESM(require("os"), 1); -var import_node_tty = __toESM(require("tty"), 1); -function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : import_node_process.default.argv) { - const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; - const position = argv.indexOf(prefix + flag); - const terminatorPosition = argv.indexOf("--"); - return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); -} -var { env } = import_node_process.default; -var flagForceColor; -if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { - flagForceColor = 0; -} else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { - flagForceColor = 1; -} -function envForceColor() { - if ("FORCE_COLOR" in env) { - if (env.FORCE_COLOR === "true") { - return 1; - } - if (env.FORCE_COLOR === "false") { - return 0; - } - return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3); - } -} -function translateLevel(level) { - if (level === 0) { - return false; - } - return { - level, - hasBasic: true, - has256: level >= 2, - has16m: level >= 3 - }; -} -function _supportsColor(haveStream, { streamIsTTY, sniffFlags = true } = {}) { - const noFlagForceColor = envForceColor(); - if (noFlagForceColor !== void 0) { - flagForceColor = noFlagForceColor; - } - const forceColor = sniffFlags ? flagForceColor : noFlagForceColor; - if (forceColor === 0) { - return 0; - } - if (sniffFlags) { - if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { - return 3; - } - if (hasFlag("color=256")) { - return 2; - } - } - if ("TF_BUILD" in env && "AGENT_NAME" in env) { - return 1; - } - if (haveStream && !streamIsTTY && forceColor === void 0) { - return 0; - } - const min = forceColor || 0; - if (env.TERM === "dumb") { - return min; - } - if (import_node_process.default.platform === "win32") { - const osRelease = import_node_os.default.release().split("."); - if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { - return Number(osRelease[2]) >= 14931 ? 
3 : 2; - } - return 1; - } - if ("CI" in env) { - if ("GITHUB_ACTIONS" in env || "GITEA_ACTIONS" in env) { - return 3; - } - if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "BUILDKITE", "DRONE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { - return 1; - } - return min; - } - if ("TEAMCITY_VERSION" in env) { - return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; - } - if (env.COLORTERM === "truecolor") { - return 3; - } - if (env.TERM === "xterm-kitty") { - return 3; - } - if ("TERM_PROGRAM" in env) { - const version2 = Number.parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); - switch (env.TERM_PROGRAM) { - case "iTerm.app": { - return version2 >= 3 ? 3 : 2; + return type; } - case "Apple_Terminal": { - return 2; + function convertIteratorOptions(options, context2) { + assertDictionary(options, context2); + const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel; + return { preventCancel: Boolean(preventCancel) }; } - } - } - if (/-256(color)?$/i.test(env.TERM)) { - return 2; - } - if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { - return 1; - } - if ("COLORTERM" in env) { - return 1; - } - return min; -} -function createSupportsColor(stream, options = {}) { - const level = _supportsColor(stream, { - streamIsTTY: stream && stream.isTTY, - ...options - }); - return translateLevel(level); -} -var supportsColor = { + function convertPipeOptions(options, context2) { + assertDictionary(options, context2); + const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort; + const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel; + const preventClose = options === null || options === void 0 ? void 0 : options.preventClose; + const signal = options === null || options === void 0 ? void 0 : options.signal; + if (signal !== void 0) { + assertAbortSignal(signal, `${context2} has member 'signal' that`); + } + return { + preventAbort: Boolean(preventAbort), + preventCancel: Boolean(preventCancel), + preventClose: Boolean(preventClose), + signal + }; + } + function assertAbortSignal(signal, context2) { + if (!isAbortSignal2(signal)) { + throw new TypeError(`${context2} is not an AbortSignal.`); + } + } + function convertReadableWritablePair(pair, context2) { + assertDictionary(pair, context2); + const readable = pair === null || pair === void 0 ? void 0 : pair.readable; + assertRequiredField(readable, "readable", "ReadableWritablePair"); + assertReadableStream(readable, `${context2} has member 'readable' that`); + const writable = pair === null || pair === void 0 ? 
void 0 : pair.writable; + assertRequiredField(writable, "writable", "ReadableWritablePair"); + assertWritableStream(writable, `${context2} has member 'writable' that`); + return { readable, writable }; + } + class ReadableStream2 { + constructor(rawUnderlyingSource = {}, rawStrategy = {}) { + if (rawUnderlyingSource === void 0) { + rawUnderlyingSource = null; + } else { + assertObject(rawUnderlyingSource, "First parameter"); + } + const strategy = convertQueuingStrategy(rawStrategy, "Second parameter"); + const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, "First parameter"); + InitializeReadableStream(this); + if (underlyingSource.type === "bytes") { + if (strategy.size !== void 0) { + throw new RangeError("The strategy for a byte stream cannot have a size function"); + } + const highWaterMark = ExtractHighWaterMark(strategy, 0); + SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark); + } else { + const sizeAlgorithm = ExtractSizeAlgorithm(strategy); + const highWaterMark = ExtractHighWaterMark(strategy, 1); + SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm); + } + } + /** + * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}. + */ + get locked() { + if (!IsReadableStream(this)) { + throw streamBrandCheckException$1("locked"); + } + return IsReadableStreamLocked(this); + } + /** + * Cancels the stream, signaling a loss of interest in the stream by a consumer. + * + * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()} + * method, which might or might not use it. + */ + cancel(reason = void 0) { + if (!IsReadableStream(this)) { + return promiseRejectedWith(streamBrandCheckException$1("cancel")); + } + if (IsReadableStreamLocked(this)) { + return promiseRejectedWith(new TypeError("Cannot cancel a stream that already has a reader")); + } + return ReadableStreamCancel(this, reason); + } + getReader(rawOptions = void 0) { + if (!IsReadableStream(this)) { + throw streamBrandCheckException$1("getReader"); + } + const options = convertReaderOptions(rawOptions, "First parameter"); + if (options.mode === void 0) { + return AcquireReadableStreamDefaultReader(this); + } + return AcquireReadableStreamBYOBReader(this); + } + pipeThrough(rawTransform, rawOptions = {}) { + if (!IsReadableStream(this)) { + throw streamBrandCheckException$1("pipeThrough"); + } + assertRequiredArgument(rawTransform, 1, "pipeThrough"); + const transform = convertReadableWritablePair(rawTransform, "First parameter"); + const options = convertPipeOptions(rawOptions, "Second parameter"); + if (IsReadableStreamLocked(this)) { + throw new TypeError("ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream"); + } + if (IsWritableStreamLocked(transform.writable)) { + throw new TypeError("ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream"); + } + const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal); + setPromiseIsHandledToTrue(promise); + return transform.readable; + } + pipeTo(destination, rawOptions = {}) { + if (!IsReadableStream(this)) { + return promiseRejectedWith(streamBrandCheckException$1("pipeTo")); + } + if (destination === void 0) { + return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`); + } + if (!IsWritableStream(destination)) 
{ + return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`)); + } + let options; + try { + options = convertPipeOptions(rawOptions, "Second parameter"); + } catch (e2) { + return promiseRejectedWith(e2); + } + if (IsReadableStreamLocked(this)) { + return promiseRejectedWith(new TypeError("ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream")); + } + if (IsWritableStreamLocked(destination)) { + return promiseRejectedWith(new TypeError("ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream")); + } + return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal); + } + /** + * Tees this readable stream, returning a two-element array containing the two resulting branches as + * new {@link ReadableStream} instances. + * + * Teeing a stream will lock it, preventing any other consumer from acquiring a reader. + * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be + * propagated to the stream's underlying source. + * + * Note that the chunks seen in each branch will be the same object. If the chunks are not immutable, + * this could allow interference between the two branches. + */ + tee() { + if (!IsReadableStream(this)) { + throw streamBrandCheckException$1("tee"); + } + const branches = ReadableStreamTee(this); + return CreateArrayFromList(branches); + } + values(rawOptions = void 0) { + if (!IsReadableStream(this)) { + throw streamBrandCheckException$1("values"); + } + const options = convertIteratorOptions(rawOptions, "First parameter"); + return AcquireReadableStreamAsyncIterator(this, options.preventCancel); + } + [SymbolAsyncIterator](options) { + return this.values(options); + } + /** + * Creates a new ReadableStream wrapping the provided iterable or async iterable. + * + * This can be used to adapt various kinds of objects into a readable stream, + * such as an array, an async generator, or a Node.js readable stream. 
+ */ + static from(asyncIterable) { + return ReadableStreamFrom(asyncIterable); + } + } + Object.defineProperties(ReadableStream2, { + from: { enumerable: true } + }); + Object.defineProperties(ReadableStream2.prototype, { + cancel: { enumerable: true }, + getReader: { enumerable: true }, + pipeThrough: { enumerable: true }, + pipeTo: { enumerable: true }, + tee: { enumerable: true }, + values: { enumerable: true }, + locked: { enumerable: true } + }); + setFunctionName(ReadableStream2.from, "from"); + setFunctionName(ReadableStream2.prototype.cancel, "cancel"); + setFunctionName(ReadableStream2.prototype.getReader, "getReader"); + setFunctionName(ReadableStream2.prototype.pipeThrough, "pipeThrough"); + setFunctionName(ReadableStream2.prototype.pipeTo, "pipeTo"); + setFunctionName(ReadableStream2.prototype.tee, "tee"); + setFunctionName(ReadableStream2.prototype.values, "values"); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(ReadableStream2.prototype, Symbol.toStringTag, { + value: "ReadableStream", + configurable: true + }); + } + Object.defineProperty(ReadableStream2.prototype, SymbolAsyncIterator, { + value: ReadableStream2.prototype.values, + writable: true, + configurable: true + }); + function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) { + const stream = Object.create(ReadableStream2.prototype); + InitializeReadableStream(stream); + const controller = Object.create(ReadableStreamDefaultController.prototype); + SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm); + return stream; + } + function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) { + const stream = Object.create(ReadableStream2.prototype); + InitializeReadableStream(stream); + const controller = Object.create(ReadableByteStreamController.prototype); + SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, void 0); + return stream; + } + function InitializeReadableStream(stream) { + stream._state = "readable"; + stream._reader = void 0; + stream._storedError = void 0; + stream._disturbed = false; + } + function IsReadableStream(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_readableStreamController")) { + return false; + } + return x2 instanceof ReadableStream2; + } + function IsReadableStreamLocked(stream) { + if (stream._reader === void 0) { + return false; + } + return true; + } + function ReadableStreamCancel(stream, reason) { + stream._disturbed = true; + if (stream._state === "closed") { + return promiseResolvedWith(void 0); + } + if (stream._state === "errored") { + return promiseRejectedWith(stream._storedError); + } + ReadableStreamClose(stream); + const reader = stream._reader; + if (reader !== void 0 && IsReadableStreamBYOBReader(reader)) { + const readIntoRequests = reader._readIntoRequests; + reader._readIntoRequests = new SimpleQueue(); + readIntoRequests.forEach((readIntoRequest) => { + readIntoRequest._closeSteps(void 0); + }); + } + const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason); + return transformPromiseWith(sourceCancelPromise, noop4); + } + function ReadableStreamClose(stream) { + stream._state = "closed"; + const reader = stream._reader; + if (reader === void 0) { + return; + } + defaultReaderClosedPromiseResolve(reader); + if 
(IsReadableStreamDefaultReader(reader)) { + const readRequests = reader._readRequests; + reader._readRequests = new SimpleQueue(); + readRequests.forEach((readRequest) => { + readRequest._closeSteps(); + }); + } + } + function ReadableStreamError(stream, e2) { + stream._state = "errored"; + stream._storedError = e2; + const reader = stream._reader; + if (reader === void 0) { + return; + } + defaultReaderClosedPromiseReject(reader, e2); + if (IsReadableStreamDefaultReader(reader)) { + ReadableStreamDefaultReaderErrorReadRequests(reader, e2); + } else { + ReadableStreamBYOBReaderErrorReadIntoRequests(reader, e2); + } + } + function streamBrandCheckException$1(name) { + return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`); + } + function convertQueuingStrategyInit(init, context2) { + assertDictionary(init, context2); + const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark; + assertRequiredField(highWaterMark, "highWaterMark", "QueuingStrategyInit"); + return { + highWaterMark: convertUnrestrictedDouble(highWaterMark) + }; + } + const byteLengthSizeFunction = (chunk) => { + return chunk.byteLength; + }; + setFunctionName(byteLengthSizeFunction, "size"); + class ByteLengthQueuingStrategy { + constructor(options) { + assertRequiredArgument(options, 1, "ByteLengthQueuingStrategy"); + options = convertQueuingStrategyInit(options, "First parameter"); + this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark; + } + /** + * Returns the high water mark provided to the constructor. + */ + get highWaterMark() { + if (!IsByteLengthQueuingStrategy(this)) { + throw byteLengthBrandCheckException("highWaterMark"); + } + return this._byteLengthQueuingStrategyHighWaterMark; + } + /** + * Measures the size of `chunk` by returning the value of its `byteLength` property. + */ + get size() { + if (!IsByteLengthQueuingStrategy(this)) { + throw byteLengthBrandCheckException("size"); + } + return byteLengthSizeFunction; + } + } + Object.defineProperties(ByteLengthQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true } + }); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(ByteLengthQueuingStrategy.prototype, Symbol.toStringTag, { + value: "ByteLengthQueuingStrategy", + configurable: true + }); + } + function byteLengthBrandCheckException(name) { + return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`); + } + function IsByteLengthQueuingStrategy(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_byteLengthQueuingStrategyHighWaterMark")) { + return false; + } + return x2 instanceof ByteLengthQueuingStrategy; + } + const countSizeFunction = () => { + return 1; + }; + setFunctionName(countSizeFunction, "size"); + class CountQueuingStrategy { + constructor(options) { + assertRequiredArgument(options, 1, "CountQueuingStrategy"); + options = convertQueuingStrategyInit(options, "First parameter"); + this._countQueuingStrategyHighWaterMark = options.highWaterMark; + } + /** + * Returns the high water mark provided to the constructor. + */ + get highWaterMark() { + if (!IsCountQueuingStrategy(this)) { + throw countBrandCheckException("highWaterMark"); + } + return this._countQueuingStrategyHighWaterMark; + } + /** + * Measures the size of `chunk` by always returning 1. + * This ensures that the total queue size is a count of the number of chunks in the queue. 
+ */ + get size() { + if (!IsCountQueuingStrategy(this)) { + throw countBrandCheckException("size"); + } + return countSizeFunction; + } + } + Object.defineProperties(CountQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true } + }); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(CountQueuingStrategy.prototype, Symbol.toStringTag, { + value: "CountQueuingStrategy", + configurable: true + }); + } + function countBrandCheckException(name) { + return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`); + } + function IsCountQueuingStrategy(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_countQueuingStrategyHighWaterMark")) { + return false; + } + return x2 instanceof CountQueuingStrategy; + } + function convertTransformer(original, context2) { + assertDictionary(original, context2); + const cancel = original === null || original === void 0 ? void 0 : original.cancel; + const flush = original === null || original === void 0 ? void 0 : original.flush; + const readableType = original === null || original === void 0 ? void 0 : original.readableType; + const start = original === null || original === void 0 ? void 0 : original.start; + const transform = original === null || original === void 0 ? void 0 : original.transform; + const writableType = original === null || original === void 0 ? void 0 : original.writableType; + return { + cancel: cancel === void 0 ? void 0 : convertTransformerCancelCallback(cancel, original, `${context2} has member 'cancel' that`), + flush: flush === void 0 ? void 0 : convertTransformerFlushCallback(flush, original, `${context2} has member 'flush' that`), + readableType, + start: start === void 0 ? void 0 : convertTransformerStartCallback(start, original, `${context2} has member 'start' that`), + transform: transform === void 0 ? 
void 0 : convertTransformerTransformCallback(transform, original, `${context2} has member 'transform' that`), + writableType + }; + } + function convertTransformerFlushCallback(fn, original, context2) { + assertFunction(fn, context2); + return (controller) => promiseCall(fn, original, [controller]); + } + function convertTransformerStartCallback(fn, original, context2) { + assertFunction(fn, context2); + return (controller) => reflectCall(fn, original, [controller]); + } + function convertTransformerTransformCallback(fn, original, context2) { + assertFunction(fn, context2); + return (chunk, controller) => promiseCall(fn, original, [chunk, controller]); + } + function convertTransformerCancelCallback(fn, original, context2) { + assertFunction(fn, context2); + return (reason) => promiseCall(fn, original, [reason]); + } + class TransformStream2 { + constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) { + if (rawTransformer === void 0) { + rawTransformer = null; + } + const writableStrategy = convertQueuingStrategy(rawWritableStrategy, "Second parameter"); + const readableStrategy = convertQueuingStrategy(rawReadableStrategy, "Third parameter"); + const transformer = convertTransformer(rawTransformer, "First parameter"); + if (transformer.readableType !== void 0) { + throw new RangeError("Invalid readableType specified"); + } + if (transformer.writableType !== void 0) { + throw new RangeError("Invalid writableType specified"); + } + const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0); + const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy); + const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1); + const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy); + let startPromise_resolve; + const startPromise = newPromise((resolve) => { + startPromise_resolve = resolve; + }); + InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm); + SetUpTransformStreamDefaultControllerFromTransformer(this, transformer); + if (transformer.start !== void 0) { + startPromise_resolve(transformer.start(this._transformStreamController)); + } else { + startPromise_resolve(void 0); + } + } + /** + * The readable side of the transform stream. + */ + get readable() { + if (!IsTransformStream(this)) { + throw streamBrandCheckException("readable"); + } + return this._readable; + } + /** + * The writable side of the transform stream. 
+ */ + get writable() { + if (!IsTransformStream(this)) { + throw streamBrandCheckException("writable"); + } + return this._writable; + } + } + Object.defineProperties(TransformStream2.prototype, { + readable: { enumerable: true }, + writable: { enumerable: true } + }); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(TransformStream2.prototype, Symbol.toStringTag, { + value: "TransformStream", + configurable: true + }); + } + function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) { + function startAlgorithm() { + return startPromise; + } + function writeAlgorithm(chunk) { + return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk); + } + function abortAlgorithm(reason) { + return TransformStreamDefaultSinkAbortAlgorithm(stream, reason); + } + function closeAlgorithm() { + return TransformStreamDefaultSinkCloseAlgorithm(stream); + } + stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm); + function pullAlgorithm() { + return TransformStreamDefaultSourcePullAlgorithm(stream); + } + function cancelAlgorithm(reason) { + return TransformStreamDefaultSourceCancelAlgorithm(stream, reason); + } + stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm); + stream._backpressure = void 0; + stream._backpressureChangePromise = void 0; + stream._backpressureChangePromise_resolve = void 0; + TransformStreamSetBackpressure(stream, true); + stream._transformStreamController = void 0; + } + function IsTransformStream(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_transformStreamController")) { + return false; + } + return x2 instanceof TransformStream2; + } + function TransformStreamError(stream, e2) { + ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e2); + TransformStreamErrorWritableAndUnblockWrite(stream, e2); + } + function TransformStreamErrorWritableAndUnblockWrite(stream, e2) { + TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController); + WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e2); + TransformStreamUnblockWrite(stream); + } + function TransformStreamUnblockWrite(stream) { + if (stream._backpressure) { + TransformStreamSetBackpressure(stream, false); + } + } + function TransformStreamSetBackpressure(stream, backpressure) { + if (stream._backpressureChangePromise !== void 0) { + stream._backpressureChangePromise_resolve(); + } + stream._backpressureChangePromise = newPromise((resolve) => { + stream._backpressureChangePromise_resolve = resolve; + }); + stream._backpressure = backpressure; + } + class TransformStreamDefaultController { + constructor() { + throw new TypeError("Illegal constructor"); + } + /** + * Returns the desired size to fill the readable side’s internal queue. It can be negative, if the queue is over-full. 
+ */ + get desiredSize() { + if (!IsTransformStreamDefaultController(this)) { + throw defaultControllerBrandCheckException("desiredSize"); + } + const readableController = this._controlledTransformStream._readable._readableStreamController; + return ReadableStreamDefaultControllerGetDesiredSize(readableController); + } + enqueue(chunk = void 0) { + if (!IsTransformStreamDefaultController(this)) { + throw defaultControllerBrandCheckException("enqueue"); + } + TransformStreamDefaultControllerEnqueue(this, chunk); + } + /** + * Errors both the readable side and the writable side of the controlled transform stream, making all future + * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded. + */ + error(reason = void 0) { + if (!IsTransformStreamDefaultController(this)) { + throw defaultControllerBrandCheckException("error"); + } + TransformStreamDefaultControllerError(this, reason); + } + /** + * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the + * transformer only needs to consume a portion of the chunks written to the writable side. + */ + terminate() { + if (!IsTransformStreamDefaultController(this)) { + throw defaultControllerBrandCheckException("terminate"); + } + TransformStreamDefaultControllerTerminate(this); + } + } + Object.defineProperties(TransformStreamDefaultController.prototype, { + enqueue: { enumerable: true }, + error: { enumerable: true }, + terminate: { enumerable: true }, + desiredSize: { enumerable: true } + }); + setFunctionName(TransformStreamDefaultController.prototype.enqueue, "enqueue"); + setFunctionName(TransformStreamDefaultController.prototype.error, "error"); + setFunctionName(TransformStreamDefaultController.prototype.terminate, "terminate"); + if (typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(TransformStreamDefaultController.prototype, Symbol.toStringTag, { + value: "TransformStreamDefaultController", + configurable: true + }); + } + function IsTransformStreamDefaultController(x2) { + if (!typeIsObject(x2)) { + return false; + } + if (!Object.prototype.hasOwnProperty.call(x2, "_controlledTransformStream")) { + return false; + } + return x2 instanceof TransformStreamDefaultController; + } + function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm, cancelAlgorithm) { + controller._controlledTransformStream = stream; + stream._transformStreamController = controller; + controller._transformAlgorithm = transformAlgorithm; + controller._flushAlgorithm = flushAlgorithm; + controller._cancelAlgorithm = cancelAlgorithm; + controller._finishPromise = void 0; + controller._finishPromise_resolve = void 0; + controller._finishPromise_reject = void 0; + } + function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) { + const controller = Object.create(TransformStreamDefaultController.prototype); + let transformAlgorithm; + let flushAlgorithm; + let cancelAlgorithm; + if (transformer.transform !== void 0) { + transformAlgorithm = (chunk) => transformer.transform(chunk, controller); + } else { + transformAlgorithm = (chunk) => { + try { + TransformStreamDefaultControllerEnqueue(controller, chunk); + return promiseResolvedWith(void 0); + } catch (transformResultE) { + return promiseRejectedWith(transformResultE); + } + }; + } + if (transformer.flush !== void 0) { + flushAlgorithm = () => transformer.flush(controller); + } else { + flushAlgorithm = () => 
promiseResolvedWith(void 0); + } + if (transformer.cancel !== void 0) { + cancelAlgorithm = (reason) => transformer.cancel(reason); + } else { + cancelAlgorithm = () => promiseResolvedWith(void 0); + } + SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm, cancelAlgorithm); + } + function TransformStreamDefaultControllerClearAlgorithms(controller) { + controller._transformAlgorithm = void 0; + controller._flushAlgorithm = void 0; + controller._cancelAlgorithm = void 0; + } + function TransformStreamDefaultControllerEnqueue(controller, chunk) { + const stream = controller._controlledTransformStream; + const readableController = stream._readable._readableStreamController; + if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) { + throw new TypeError("Readable side is not in a state that permits enqueue"); + } + try { + ReadableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (e2) { + TransformStreamErrorWritableAndUnblockWrite(stream, e2); + throw stream._readable._storedError; + } + const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController); + if (backpressure !== stream._backpressure) { + TransformStreamSetBackpressure(stream, true); + } + } + function TransformStreamDefaultControllerError(controller, e2) { + TransformStreamError(controller._controlledTransformStream, e2); + } + function TransformStreamDefaultControllerPerformTransform(controller, chunk) { + const transformPromise = controller._transformAlgorithm(chunk); + return transformPromiseWith(transformPromise, void 0, (r2) => { + TransformStreamError(controller._controlledTransformStream, r2); + throw r2; + }); + } + function TransformStreamDefaultControllerTerminate(controller) { + const stream = controller._controlledTransformStream; + const readableController = stream._readable._readableStreamController; + ReadableStreamDefaultControllerClose(readableController); + const error = new TypeError("TransformStream terminated"); + TransformStreamErrorWritableAndUnblockWrite(stream, error); + } + function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) { + const controller = stream._transformStreamController; + if (stream._backpressure) { + const backpressureChangePromise = stream._backpressureChangePromise; + return transformPromiseWith(backpressureChangePromise, () => { + const writable = stream._writable; + const state = writable._state; + if (state === "erroring") { + throw writable._storedError; + } + return TransformStreamDefaultControllerPerformTransform(controller, chunk); + }); + } + return TransformStreamDefaultControllerPerformTransform(controller, chunk); + } + function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) { + const controller = stream._transformStreamController; + if (controller._finishPromise !== void 0) { + return controller._finishPromise; + } + const readable = stream._readable; + controller._finishPromise = newPromise((resolve, reject) => { + controller._finishPromise_resolve = resolve; + controller._finishPromise_reject = reject; + }); + const cancelPromise = controller._cancelAlgorithm(reason); + TransformStreamDefaultControllerClearAlgorithms(controller); + uponPromise(cancelPromise, () => { + if (readable._state === "errored") { + defaultControllerFinishPromiseReject(controller, readable._storedError); + } else { + ReadableStreamDefaultControllerError(readable._readableStreamController, reason); + defaultControllerFinishPromiseResolve(controller); + } + return null; + }, (r2) 
=> { + ReadableStreamDefaultControllerError(readable._readableStreamController, r2); + defaultControllerFinishPromiseReject(controller, r2); + return null; + }); + return controller._finishPromise; + } + function TransformStreamDefaultSinkCloseAlgorithm(stream) { + const controller = stream._transformStreamController; + if (controller._finishPromise !== void 0) { + return controller._finishPromise; + } + const readable = stream._readable; + controller._finishPromise = newPromise((resolve, reject) => { + controller._finishPromise_resolve = resolve; + controller._finishPromise_reject = reject; + }); + const flushPromise = controller._flushAlgorithm(); + TransformStreamDefaultControllerClearAlgorithms(controller); + uponPromise(flushPromise, () => { + if (readable._state === "errored") { + defaultControllerFinishPromiseReject(controller, readable._storedError); + } else { + ReadableStreamDefaultControllerClose(readable._readableStreamController); + defaultControllerFinishPromiseResolve(controller); + } + return null; + }, (r2) => { + ReadableStreamDefaultControllerError(readable._readableStreamController, r2); + defaultControllerFinishPromiseReject(controller, r2); + return null; + }); + return controller._finishPromise; + } + function TransformStreamDefaultSourcePullAlgorithm(stream) { + TransformStreamSetBackpressure(stream, false); + return stream._backpressureChangePromise; + } + function TransformStreamDefaultSourceCancelAlgorithm(stream, reason) { + const controller = stream._transformStreamController; + if (controller._finishPromise !== void 0) { + return controller._finishPromise; + } + const writable = stream._writable; + controller._finishPromise = newPromise((resolve, reject) => { + controller._finishPromise_resolve = resolve; + controller._finishPromise_reject = reject; + }); + const cancelPromise = controller._cancelAlgorithm(reason); + TransformStreamDefaultControllerClearAlgorithms(controller); + uponPromise(cancelPromise, () => { + if (writable._state === "errored") { + defaultControllerFinishPromiseReject(controller, writable._storedError); + } else { + WritableStreamDefaultControllerErrorIfNeeded(writable._writableStreamController, reason); + TransformStreamUnblockWrite(stream); + defaultControllerFinishPromiseResolve(controller); + } + return null; + }, (r2) => { + WritableStreamDefaultControllerErrorIfNeeded(writable._writableStreamController, r2); + TransformStreamUnblockWrite(stream); + defaultControllerFinishPromiseReject(controller, r2); + return null; + }); + return controller._finishPromise; + } + function defaultControllerBrandCheckException(name) { + return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`); + } + function defaultControllerFinishPromiseResolve(controller) { + if (controller._finishPromise_resolve === void 0) { + return; + } + controller._finishPromise_resolve(); + controller._finishPromise_resolve = void 0; + controller._finishPromise_reject = void 0; + } + function defaultControllerFinishPromiseReject(controller, reason) { + if (controller._finishPromise_reject === void 0) { + return; + } + setPromiseIsHandledToTrue(controller._finishPromise); + controller._finishPromise_reject(reason); + controller._finishPromise_resolve = void 0; + controller._finishPromise_reject = void 0; + } + function streamBrandCheckException(name) { + return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`); + } + exports3.ByteLengthQueuingStrategy = 
ByteLengthQueuingStrategy; + exports3.CountQueuingStrategy = CountQueuingStrategy; + exports3.ReadableByteStreamController = ReadableByteStreamController; + exports3.ReadableStream = ReadableStream2; + exports3.ReadableStreamBYOBReader = ReadableStreamBYOBReader; + exports3.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; + exports3.ReadableStreamDefaultController = ReadableStreamDefaultController; + exports3.ReadableStreamDefaultReader = ReadableStreamDefaultReader; + exports3.TransformStream = TransformStream2; + exports3.TransformStreamDefaultController = TransformStreamDefaultController; + exports3.WritableStream = WritableStream; + exports3.WritableStreamDefaultController = WritableStreamDefaultController; + exports3.WritableStreamDefaultWriter = WritableStreamDefaultWriter; + }); + } +}); + +// node_modules/fetch-blob/streams.cjs +var require_streams = __commonJS({ + "node_modules/fetch-blob/streams.cjs"() { + "use strict"; + var POOL_SIZE2 = 65536; + if (!globalThis.ReadableStream) { + try { + const process3 = require("process"); + const { emitWarning } = process3; + try { + process3.emitWarning = () => { + }; + Object.assign(globalThis, require("stream/web")); + process3.emitWarning = emitWarning; + } catch (error) { + process3.emitWarning = emitWarning; + throw error; + } + } catch (error) { + Object.assign(globalThis, require_ponyfill_es2018()); + } + } + try { + const { Blob: Blob4 } = require("buffer"); + if (Blob4 && !Blob4.prototype.stream) { + Blob4.prototype.stream = function name(params) { + let position = 0; + const blob = this; + return new ReadableStream({ + type: "bytes", + async pull(ctrl) { + const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE2)); + const buffer = await chunk.arrayBuffer(); + position += buffer.byteLength; + ctrl.enqueue(new Uint8Array(buffer)); + if (position === blob.size) { + ctrl.close(); + } + } + }); + }; + } + } catch (error) { + } + } +}); + +// node_modules/fetch-blob/index.js +async function* toIterator(parts, clone2 = true) { + for (const part of parts) { + if ("stream" in part) { + yield* ( + /** @type {AsyncIterableIterator} */ + part.stream() + ); + } else if (ArrayBuffer.isView(part)) { + if (clone2) { + let position = part.byteOffset; + const end = part.byteOffset + part.byteLength; + while (position !== end) { + const size = Math.min(end - position, POOL_SIZE); + const chunk = part.buffer.slice(position, position + size); + position += chunk.byteLength; + yield new Uint8Array(chunk); + } + } else { + yield part; + } + } else { + let position = 0, b = ( + /** @type {Blob} */ + part + ); + while (position !== b.size) { + const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE)); + const buffer = await chunk.arrayBuffer(); + position += buffer.byteLength; + yield new Uint8Array(buffer); + } + } + } +} +var import_streams, POOL_SIZE, _Blob, Blob3, fetch_blob_default; +var init_fetch_blob = __esm({ + "node_modules/fetch-blob/index.js"() { + "use strict"; + import_streams = __toESM(require_streams(), 1); + POOL_SIZE = 65536; + _Blob = class Blob2 { + /** @type {Array.<(Blob|Uint8Array)>} */ + #parts = []; + #type = ""; + #size = 0; + #endings = "transparent"; + /** + * The Blob() constructor returns a new Blob object. The content + * of the blob consists of the concatenation of the values given + * in the parameter array. 
+ * + * @param {*} blobParts + * @param {{ type?: string, endings?: string }} [options] + */ + constructor(blobParts = [], options = {}) { + if (typeof blobParts !== "object" || blobParts === null) { + throw new TypeError("Failed to construct 'Blob': The provided value cannot be converted to a sequence."); + } + if (typeof blobParts[Symbol.iterator] !== "function") { + throw new TypeError("Failed to construct 'Blob': The object must have a callable @@iterator property."); + } + if (typeof options !== "object" && typeof options !== "function") { + throw new TypeError("Failed to construct 'Blob': parameter 2 cannot convert to dictionary."); + } + if (options === null) options = {}; + const encoder = new TextEncoder(); + for (const element of blobParts) { + let part; + if (ArrayBuffer.isView(element)) { + part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength)); + } else if (element instanceof ArrayBuffer) { + part = new Uint8Array(element.slice(0)); + } else if (element instanceof Blob2) { + part = element; + } else { + part = encoder.encode(`${element}`); + } + this.#size += ArrayBuffer.isView(part) ? part.byteLength : part.size; + this.#parts.push(part); + } + this.#endings = `${options.endings === void 0 ? "transparent" : options.endings}`; + const type = options.type === void 0 ? "" : String(options.type); + this.#type = /^[\x20-\x7E]*$/.test(type) ? type : ""; + } + /** + * The Blob interface's size property returns the + * size of the Blob in bytes. + */ + get size() { + return this.#size; + } + /** + * The type property of a Blob object returns the MIME type of the file. + */ + get type() { + return this.#type; + } + /** + * The text() method in the Blob interface returns a Promise + * that resolves with a string containing the contents of + * the blob, interpreted as UTF-8. + * + * @return {Promise} + */ + async text() { + const decoder = new TextDecoder(); + let str = ""; + for await (const part of toIterator(this.#parts, false)) { + str += decoder.decode(part, { stream: true }); + } + str += decoder.decode(); + return str; + } + /** + * The arrayBuffer() method in the Blob interface returns a + * Promise that resolves with the contents of the blob as + * binary data contained in an ArrayBuffer. + * + * @return {Promise} + */ + async arrayBuffer() { + const data = new Uint8Array(this.size); + let offset = 0; + for await (const chunk of toIterator(this.#parts, false)) { + data.set(chunk, offset); + offset += chunk.length; + } + return data.buffer; + } + stream() { + const it = toIterator(this.#parts, true); + return new globalThis.ReadableStream({ + // @ts-ignore + type: "bytes", + async pull(ctrl) { + const chunk = await it.next(); + chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value); + }, + async cancel() { + await it.return(); + } + }); + } + /** + * The Blob interface's slice() method creates and returns a + * new Blob object which contains data from a subset of the + * blob on which it's called. + * + * @param {number} [start] + * @param {number} [end] + * @param {string} [type] + */ + slice(start = 0, end = this.size, type = "") { + const { size } = this; + let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size); + let relativeEnd = end < 0 ? 
Math.max(size + end, 0) : Math.min(end, size); + const span = Math.max(relativeEnd - relativeStart, 0); + const parts = this.#parts; + const blobParts = []; + let added = 0; + for (const part of parts) { + if (added >= span) { + break; + } + const size2 = ArrayBuffer.isView(part) ? part.byteLength : part.size; + if (relativeStart && size2 <= relativeStart) { + relativeStart -= size2; + relativeEnd -= size2; + } else { + let chunk; + if (ArrayBuffer.isView(part)) { + chunk = part.subarray(relativeStart, Math.min(size2, relativeEnd)); + added += chunk.byteLength; + } else { + chunk = part.slice(relativeStart, Math.min(size2, relativeEnd)); + added += chunk.size; + } + relativeEnd -= size2; + blobParts.push(chunk); + relativeStart = 0; + } + } + const blob = new Blob2([], { type: String(type).toLowerCase() }); + blob.#size = span; + blob.#parts = blobParts; + return blob; + } + get [Symbol.toStringTag]() { + return "Blob"; + } + static [Symbol.hasInstance](object) { + return object && typeof object === "object" && typeof object.constructor === "function" && (typeof object.stream === "function" || typeof object.arrayBuffer === "function") && /^(Blob|File)$/.test(object[Symbol.toStringTag]); + } + }; + Object.defineProperties(_Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } + }); + Blob3 = _Blob; + fetch_blob_default = Blob3; + } +}); + +// node_modules/fetch-blob/file.js +var _File, File2, file_default; +var init_file = __esm({ + "node_modules/fetch-blob/file.js"() { + "use strict"; + init_fetch_blob(); + _File = class File extends fetch_blob_default { + #lastModified = 0; + #name = ""; + /** + * @param {*[]} fileBits + * @param {string} fileName + * @param {{lastModified?: number, type?: string}} options + */ + // @ts-ignore + constructor(fileBits, fileName, options = {}) { + if (arguments.length < 2) { + throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`); + } + super(fileBits, options); + if (options === null) options = {}; + const lastModified = options.lastModified === void 0 ? Date.now() : Number(options.lastModified); + if (!Number.isNaN(lastModified)) { + this.#lastModified = lastModified; + } + this.#name = String(fileName); + } + get name() { + return this.#name; + } + get lastModified() { + return this.#lastModified; + } + get [Symbol.toStringTag]() { + return "File"; + } + static [Symbol.hasInstance](object) { + return !!object && object instanceof fetch_blob_default && /^(File)$/.test(object[Symbol.toStringTag]); + } + }; + File2 = _File; + file_default = File2; + } +}); + +// node_modules/formdata-polyfill/esm.min.js +function formDataToBlob(F2, B = fetch_blob_default) { + var b = `${r()}${r()}`.replace(/\./g, "").slice(-28).padStart(32, "-"), c = [], p = `--${b}\r +Content-Disposition: form-data; name="`; + F2.forEach((v, n) => typeof v == "string" ? c.push(p + e(n) + `"\r +\r +${v.replace(/\r(?!\n)|(? (a += "", /^(Blob|File)$/.test(b && b[t]) ? [(c = c !== void 0 ? c + "" : b[t] == "File" ? b.name : "blob", a), b.name !== c || b[t] == "blob" ? new file_default([b], c, b) : b] : [a, b + ""]); + e = (c, f3) => (f3 ? 
c : c.replace(/\r?\n|\r/g, "\r\n")).replace(/\n/g, "%0A").replace(/\r/g, "%0D").replace(/"/g, "%22"); + x = (n, a, e2) => { + if (a.length < e2) { + throw new TypeError(`Failed to execute '${n}' on 'FormData': ${e2} arguments required, but only ${a.length} present.`); + } + }; + FormData = class FormData2 { + #d = []; + constructor(...a) { + if (a.length) throw new TypeError(`Failed to construct 'FormData': parameter 1 is not of type 'HTMLFormElement'.`); + } + get [t]() { + return "FormData"; + } + [i]() { + return this.entries(); + } + static [h](o) { + return o && typeof o === "object" && o[t] === "FormData" && !m.some((m2) => typeof o[m2] != "function"); + } + append(...a) { + x("append", arguments, 2); + this.#d.push(f(...a)); + } + delete(a) { + x("delete", arguments, 1); + a += ""; + this.#d = this.#d.filter(([b]) => b !== a); + } + get(a) { + x("get", arguments, 1); + a += ""; + for (var b = this.#d, l = b.length, c = 0; c < l; c++) if (b[c][0] === a) return b[c][1]; + return null; + } + getAll(a, b) { + x("getAll", arguments, 1); + b = []; + a += ""; + this.#d.forEach((c) => c[0] === a && b.push(c[1])); + return b; + } + has(a) { + x("has", arguments, 1); + a += ""; + return this.#d.some((b) => b[0] === a); + } + forEach(a, b) { + x("forEach", arguments, 1); + for (var [c, d] of this) a.call(b, d, c, this); + } + set(...a) { + x("set", arguments, 2); + var b = [], c = true; + a = f(...a); + this.#d.forEach((d) => { + d[0] === a[0] ? c && (c = !b.push(a)) : b.push(d); + }); + c && b.push(a); + this.#d = b; + } + *entries() { + yield* this.#d; + } + *keys() { + for (var [a] of this) yield a; + } + *values() { + for (var [, a] of this) yield a; + } + }; + } +}); + +// node_modules/node-domexception/index.js +var require_node_domexception = __commonJS({ + "node_modules/node-domexception/index.js"(exports2, module2) { + "use strict"; + if (!globalThis.DOMException) { + try { + const { MessageChannel } = require("worker_threads"), port = new MessageChannel().port1, ab = new ArrayBuffer(); + port.postMessage(ab, [ab, ab]); + } catch (err) { + err.constructor.name === "DOMException" && (globalThis.DOMException = err.constructor); + } + } + module2.exports = globalThis.DOMException; + } +}); + +// node_modules/fetch-blob/from.js +var import_node_fs, import_node_path, import_node_domexception, stat; +var init_from = __esm({ + "node_modules/fetch-blob/from.js"() { + "use strict"; + import_node_fs = require("fs"); + import_node_path = require("path"); + import_node_domexception = __toESM(require_node_domexception(), 1); + init_file(); + init_fetch_blob(); + ({ stat } = import_node_fs.promises); + } +}); + +// node_modules/node-fetch/src/utils/multipart-parser.js +var multipart_parser_exports = {}; +__export(multipart_parser_exports, { + toFormData: () => toFormData +}); +function _fileName(headerValue) { + const m2 = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i); + if (!m2) { + return; + } + const match2 = m2[2] || m2[3] || ""; + let filename = match2.slice(match2.lastIndexOf("\\") + 1); + filename = filename.replace(/%22/g, '"'); + filename = filename.replace(/&#(\d{4});/g, (m3, code) => { + return String.fromCharCode(code); + }); + return filename; +} +async function toFormData(Body2, ct) { + if (!/multipart/i.test(ct)) { + throw new TypeError("Failed to fetch"); + } + const m2 = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i); + if (!m2) { + throw new TypeError("no or bad content-type header, no multipart boundary"); + } + const parser4 = new 
MultipartParser(m2[1] || m2[2]); + let headerField; + let headerValue; + let entryValue; + let entryName; + let contentType; + let filename; + const entryChunks = []; + const formData = new FormData(); + const onPartData = (ui8a) => { + entryValue += decoder.decode(ui8a, { stream: true }); + }; + const appendToFile = (ui8a) => { + entryChunks.push(ui8a); + }; + const appendFileToFormData = () => { + const file = new file_default(entryChunks, filename, { type: contentType }); + formData.append(entryName, file); + }; + const appendEntryToFormData = () => { + formData.append(entryName, entryValue); + }; + const decoder = new TextDecoder("utf-8"); + decoder.decode(); + parser4.onPartBegin = function() { + parser4.onPartData = onPartData; + parser4.onPartEnd = appendEntryToFormData; + headerField = ""; + headerValue = ""; + entryValue = ""; + entryName = ""; + contentType = ""; + filename = null; + entryChunks.length = 0; + }; + parser4.onHeaderField = function(ui8a) { + headerField += decoder.decode(ui8a, { stream: true }); + }; + parser4.onHeaderValue = function(ui8a) { + headerValue += decoder.decode(ui8a, { stream: true }); + }; + parser4.onHeaderEnd = function() { + headerValue += decoder.decode(); + headerField = headerField.toLowerCase(); + if (headerField === "content-disposition") { + const m3 = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i); + if (m3) { + entryName = m3[2] || m3[3] || ""; + } + filename = _fileName(headerValue); + if (filename) { + parser4.onPartData = appendToFile; + parser4.onPartEnd = appendFileToFormData; + } + } else if (headerField === "content-type") { + contentType = headerValue; + } + headerValue = ""; + headerField = ""; + }; + for await (const chunk of Body2) { + parser4.write(chunk); + } + parser4.end(); + return formData; +} +var s, S, f2, F, LF, CR, SPACE, HYPHEN, COLON, A, Z, lower, noop3, MultipartParser; +var init_multipart_parser = __esm({ + "node_modules/node-fetch/src/utils/multipart-parser.js"() { + "use strict"; + init_from(); + init_esm_min(); + s = 0; + S = { + START_BOUNDARY: s++, + HEADER_FIELD_START: s++, + HEADER_FIELD: s++, + HEADER_VALUE_START: s++, + HEADER_VALUE: s++, + HEADER_VALUE_ALMOST_DONE: s++, + HEADERS_ALMOST_DONE: s++, + PART_DATA_START: s++, + PART_DATA: s++, + END: s++ + }; + f2 = 1; + F = { + PART_BOUNDARY: f2, + LAST_BOUNDARY: f2 *= 2 + }; + LF = 10; + CR = 13; + SPACE = 32; + HYPHEN = 45; + COLON = 58; + A = 97; + Z = 122; + lower = (c) => c | 32; + noop3 = () => { + }; + MultipartParser = class { + /** + * @param {string} boundary + */ + constructor(boundary) { + this.index = 0; + this.flags = 0; + this.onHeaderEnd = noop3; + this.onHeaderField = noop3; + this.onHeadersEnd = noop3; + this.onHeaderValue = noop3; + this.onPartBegin = noop3; + this.onPartData = noop3; + this.onPartEnd = noop3; + this.boundaryChars = {}; + boundary = "\r\n--" + boundary; + const ui8a = new Uint8Array(boundary.length); + for (let i2 = 0; i2 < boundary.length; i2++) { + ui8a[i2] = boundary.charCodeAt(i2); + this.boundaryChars[ui8a[i2]] = true; + } + this.boundary = ui8a; + this.lookbehind = new Uint8Array(this.boundary.length + 8); + this.state = S.START_BOUNDARY; + } + /** + * @param {Uint8Array} data + */ + write(data) { + let i2 = 0; + const length_ = data.length; + let previousIndex = this.index; + let { lookbehind, boundary, boundaryChars, index, state, flags } = this; + const boundaryLength = this.boundary.length; + const boundaryEnd = boundaryLength - 1; + const bufferLength = data.length; + let c; + let cl; + 
const mark = (name) => { + this[name + "Mark"] = i2; + }; + const clear = (name) => { + delete this[name + "Mark"]; + }; + const callback = (callbackSymbol, start, end, ui8a) => { + if (start === void 0 || start !== end) { + this[callbackSymbol](ui8a && ui8a.subarray(start, end)); + } + }; + const dataCallback = (name, clear2) => { + const markSymbol = name + "Mark"; + if (!(markSymbol in this)) { + return; + } + if (clear2) { + callback(name, this[markSymbol], i2, data); + delete this[markSymbol]; + } else { + callback(name, this[markSymbol], data.length, data); + this[markSymbol] = 0; + } + }; + for (i2 = 0; i2 < length_; i2++) { + c = data[i2]; + switch (state) { + case S.START_BOUNDARY: + if (index === boundary.length - 2) { + if (c === HYPHEN) { + flags |= F.LAST_BOUNDARY; + } else if (c !== CR) { + return; + } + index++; + break; + } else if (index - 1 === boundary.length - 2) { + if (flags & F.LAST_BOUNDARY && c === HYPHEN) { + state = S.END; + flags = 0; + } else if (!(flags & F.LAST_BOUNDARY) && c === LF) { + index = 0; + callback("onPartBegin"); + state = S.HEADER_FIELD_START; + } else { + return; + } + break; + } + if (c !== boundary[index + 2]) { + index = -2; + } + if (c === boundary[index + 2]) { + index++; + } + break; + case S.HEADER_FIELD_START: + state = S.HEADER_FIELD; + mark("onHeaderField"); + index = 0; + // falls through + case S.HEADER_FIELD: + if (c === CR) { + clear("onHeaderField"); + state = S.HEADERS_ALMOST_DONE; + break; + } + index++; + if (c === HYPHEN) { + break; + } + if (c === COLON) { + if (index === 1) { + return; + } + dataCallback("onHeaderField", true); + state = S.HEADER_VALUE_START; + break; + } + cl = lower(c); + if (cl < A || cl > Z) { + return; + } + break; + case S.HEADER_VALUE_START: + if (c === SPACE) { + break; + } + mark("onHeaderValue"); + state = S.HEADER_VALUE; + // falls through + case S.HEADER_VALUE: + if (c === CR) { + dataCallback("onHeaderValue", true); + callback("onHeaderEnd"); + state = S.HEADER_VALUE_ALMOST_DONE; + } + break; + case S.HEADER_VALUE_ALMOST_DONE: + if (c !== LF) { + return; + } + state = S.HEADER_FIELD_START; + break; + case S.HEADERS_ALMOST_DONE: + if (c !== LF) { + return; + } + callback("onHeadersEnd"); + state = S.PART_DATA_START; + break; + case S.PART_DATA_START: + state = S.PART_DATA; + mark("onPartData"); + // falls through + case S.PART_DATA: + previousIndex = index; + if (index === 0) { + i2 += boundaryEnd; + while (i2 < bufferLength && !(data[i2] in boundaryChars)) { + i2 += boundaryLength; + } + i2 -= boundaryEnd; + c = data[i2]; + } + if (index < boundary.length) { + if (boundary[index] === c) { + if (index === 0) { + dataCallback("onPartData", true); + } + index++; + } else { + index = 0; + } + } else if (index === boundary.length) { + index++; + if (c === CR) { + flags |= F.PART_BOUNDARY; + } else if (c === HYPHEN) { + flags |= F.LAST_BOUNDARY; + } else { + index = 0; + } + } else if (index - 1 === boundary.length) { + if (flags & F.PART_BOUNDARY) { + index = 0; + if (c === LF) { + flags &= ~F.PART_BOUNDARY; + callback("onPartEnd"); + callback("onPartBegin"); + state = S.HEADER_FIELD_START; + break; + } + } else if (flags & F.LAST_BOUNDARY) { + if (c === HYPHEN) { + callback("onPartEnd"); + state = S.END; + flags = 0; + } else { + index = 0; + } + } else { + index = 0; + } + } + if (index > 0) { + lookbehind[index - 1] = c; + } else if (previousIndex > 0) { + const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength); + callback("onPartData", 0, previousIndex, 
_lookbehind); + previousIndex = 0; + mark("onPartData"); + i2--; + } + break; + case S.END: + break; + default: + throw new Error(`Unexpected state entered: ${state}`); + } + } + dataCallback("onHeaderField"); + dataCallback("onHeaderValue"); + dataCallback("onPartData"); + this.index = index; + this.state = state; + this.flags = flags; + } + end() { + if (this.state === S.HEADER_FIELD_START && this.index === 0 || this.state === S.PART_DATA && this.index === this.boundary.length) { + this.onPartEnd(); + } else if (this.state !== S.END) { + throw new Error("MultipartParser.end(): stream ended unexpectedly"); + } + } + }; + } +}); + +// src/main.ts +var import_github = __toESM(require_github(), 1); + +// node_modules/chalk/source/vendor/ansi-styles/index.js +var ANSI_BACKGROUND_OFFSET = 10; +var wrapAnsi16 = (offset = 0) => (code) => `\x1B[${code + offset}m`; +var wrapAnsi256 = (offset = 0) => (code) => `\x1B[${38 + offset};5;${code}m`; +var wrapAnsi16m = (offset = 0) => (red, green, blue) => `\x1B[${38 + offset};2;${red};${green};${blue}m`; +var styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + overline: [53, 55], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + // Bright color + blackBright: [90, 39], + gray: [90, 39], + // Alias of `blackBright` + grey: [90, 39], + // Alias of `blackBright` + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + // Bright color + bgBlackBright: [100, 49], + bgGray: [100, 49], + // Alias of `bgBlackBright` + bgGrey: [100, 49], + // Alias of `bgBlackBright` + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } +}; +var modifierNames = Object.keys(styles.modifier); +var foregroundColorNames = Object.keys(styles.color); +var backgroundColorNames = Object.keys(styles.bgColor); +var colorNames = [...foregroundColorNames, ...backgroundColorNames]; +function assembleStyles() { + const codes = /* @__PURE__ */ new Map(); + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\x1B[${style[0]}m`, + close: `\x1B[${style[1]}m` + }; + group[styleName] = styles[styleName]; + codes.set(style[0], style[1]); + } + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + } + Object.defineProperty(styles, "codes", { + value: codes, + enumerable: false + }); + styles.color.close = "\x1B[39m"; + styles.bgColor.close = "\x1B[49m"; + styles.color.ansi = wrapAnsi16(); + styles.color.ansi256 = wrapAnsi256(); + styles.color.ansi16m = wrapAnsi16m(); + styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET); + Object.defineProperties(styles, { + rgbToAnsi256: { + value(red, green, blue) { 
+ if (red === green && green === blue) { + if (red < 8) { + return 16; + } + if (red > 248) { + return 231; + } + return Math.round((red - 8) / 247 * 24) + 232; + } + return 16 + 36 * Math.round(red / 255 * 5) + 6 * Math.round(green / 255 * 5) + Math.round(blue / 255 * 5); + }, + enumerable: false + }, + hexToRgb: { + value(hex) { + const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16)); + if (!matches) { + return [0, 0, 0]; + } + let [colorString] = matches; + if (colorString.length === 3) { + colorString = [...colorString].map((character) => character + character).join(""); + } + const integer = Number.parseInt(colorString, 16); + return [ + /* eslint-disable no-bitwise */ + integer >> 16 & 255, + integer >> 8 & 255, + integer & 255 + /* eslint-enable no-bitwise */ + ]; + }, + enumerable: false + }, + hexToAnsi256: { + value: (hex) => styles.rgbToAnsi256(...styles.hexToRgb(hex)), + enumerable: false + }, + ansi256ToAnsi: { + value(code) { + if (code < 8) { + return 30 + code; + } + if (code < 16) { + return 90 + (code - 8); + } + let red; + let green; + let blue; + if (code >= 232) { + red = ((code - 232) * 10 + 8) / 255; + green = red; + blue = red; + } else { + code -= 16; + const remainder = code % 36; + red = Math.floor(code / 36) / 5; + green = Math.floor(remainder / 6) / 5; + blue = remainder % 6 / 5; + } + const value = Math.max(red, green, blue) * 2; + if (value === 0) { + return 30; + } + let result = 30 + (Math.round(blue) << 2 | Math.round(green) << 1 | Math.round(red)); + if (value === 2) { + result += 60; + } + return result; + }, + enumerable: false + }, + rgbToAnsi: { + value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)), + enumerable: false + }, + hexToAnsi: { + value: (hex) => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)), + enumerable: false + } + }); + return styles; +} +var ansiStyles = assembleStyles(); +var ansi_styles_default = ansiStyles; + +// node_modules/chalk/source/vendor/supports-color/index.js +var import_node_process = __toESM(require("process"), 1); +var import_node_os = __toESM(require("os"), 1); +var import_node_tty = __toESM(require("tty"), 1); +function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : import_node_process.default.argv) { + const prefix = flag.startsWith("-") ? "" : flag.length === 1 ? "-" : "--"; + const position = argv.indexOf(prefix + flag); + const terminatorPosition = argv.indexOf("--"); + return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition); +} +var { env } = import_node_process.default; +var flagForceColor; +if (hasFlag("no-color") || hasFlag("no-colors") || hasFlag("color=false") || hasFlag("color=never")) { + flagForceColor = 0; +} else if (hasFlag("color") || hasFlag("colors") || hasFlag("color=true") || hasFlag("color=always")) { + flagForceColor = 1; +} +function envForceColor() { + if ("FORCE_COLOR" in env) { + if (env.FORCE_COLOR === "true") { + return 1; + } + if (env.FORCE_COLOR === "false") { + return 0; + } + return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3); + } +} +function translateLevel(level) { + if (level === 0) { + return false; + } + return { + level, + hasBasic: true, + has256: level >= 2, + has16m: level >= 3 + }; +} +function _supportsColor(haveStream, { streamIsTTY, sniffFlags = true } = {}) { + const noFlagForceColor = envForceColor(); + if (noFlagForceColor !== void 0) { + flagForceColor = noFlagForceColor; + } + const forceColor = sniffFlags ? 
flagForceColor : noFlagForceColor; + if (forceColor === 0) { + return 0; + } + if (sniffFlags) { + if (hasFlag("color=16m") || hasFlag("color=full") || hasFlag("color=truecolor")) { + return 3; + } + if (hasFlag("color=256")) { + return 2; + } + } + if ("TF_BUILD" in env && "AGENT_NAME" in env) { + return 1; + } + if (haveStream && !streamIsTTY && forceColor === void 0) { + return 0; + } + const min = forceColor || 0; + if (env.TERM === "dumb") { + return min; + } + if (import_node_process.default.platform === "win32") { + const osRelease = import_node_os.default.release().split("."); + if (Number(osRelease[0]) >= 10 && Number(osRelease[2]) >= 10586) { + return Number(osRelease[2]) >= 14931 ? 3 : 2; + } + return 1; + } + if ("CI" in env) { + if ("GITHUB_ACTIONS" in env || "GITEA_ACTIONS" in env) { + return 3; + } + if (["TRAVIS", "CIRCLECI", "APPVEYOR", "GITLAB_CI", "BUILDKITE", "DRONE"].some((sign) => sign in env) || env.CI_NAME === "codeship") { + return 1; + } + return min; + } + if ("TEAMCITY_VERSION" in env) { + return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0; + } + if (env.COLORTERM === "truecolor") { + return 3; + } + if (env.TERM === "xterm-kitty") { + return 3; + } + if ("TERM_PROGRAM" in env) { + const version = Number.parseInt((env.TERM_PROGRAM_VERSION || "").split(".")[0], 10); + switch (env.TERM_PROGRAM) { + case "iTerm.app": { + return version >= 3 ? 3 : 2; + } + case "Apple_Terminal": { + return 2; + } + } + } + if (/-256(color)?$/i.test(env.TERM)) { + return 2; + } + if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) { + return 1; + } + if ("COLORTERM" in env) { + return 1; + } + return min; +} +function createSupportsColor(stream, options = {}) { + const level = _supportsColor(stream, { + streamIsTTY: stream && stream.isTTY, + ...options + }); + return translateLevel(level); +} +var supportsColor = { stdout: createSupportsColor({ isTTY: import_node_tty.default.isatty(1) }), stderr: createSupportsColor({ isTTY: import_node_tty.default.isatty(2) }) }; @@ -49553,7 +50823,7 @@ var import_core = __toESM(require_core(), 1); // node_modules/simple-git/dist/esm/index.js var import_file_exists = __toESM(require_dist(), 1); -var import_debug = __toESM(require_src(), 1); +var import_debug = __toESM(require_src3(), 1); var import_child_process = require("child_process"); var import_promise_deferred = __toESM(require_dist2(), 1); var import_promise_deferred2 = __toESM(require_dist2(), 1); @@ -49743,6 +51013,11 @@ function remove(target, item) { function asArray(source) { return Array.isArray(source) ? source : [source]; } +function asCamelCase(str) { + return str.replace(/[\s-]+(.)/g, (_all, chr) => { + return chr.toUpperCase(); + }); +} function asStringArray(source) { return asArray(source).map(String); } @@ -49796,9 +51071,9 @@ function filterType(input, filter2, def) { } return arguments.length > 2 ? def : void 0; } -function filterPrimitives(input, omit2) { +function filterPrimitives(input, omit3) { const type = isPathSpec(input) ? 
"string" : typeof input; - return /number|string|boolean/.test(type) && (!omit2 || !omit2.includes(type)); + return /number|string|boolean/.test(type) && (!omit3 || !omit3.includes(type)); } function filterPlainObject(input) { return !!input && objectToString(input) === "[object Object]"; @@ -49987,8 +51262,8 @@ var init_task_options = __esm2({ init_pathspec(); } }); -function callTaskParser(parser3, streams) { - return parser3(streams.stdOut, streams.stdErr); +function callTaskParser(parser4, streams) { + return parser4(streams.stdOut, streams.stdErr); } function parseStringResponse(result, parsers12, texts, trim = true) { asArray(texts).forEach((text) => { @@ -50021,6 +51296,7 @@ __export2(utils_exports, { append: () => append, appendTaskOptions: () => appendTaskOptions, asArray: () => asArray, + asCamelCase: () => asCamelCase, asFunction: () => asFunction, asNumber: () => asNumber, asStringArray: () => asStringArray, @@ -50178,11 +51454,11 @@ __export2(task_exports, { straightThroughBufferTask: () => straightThroughBufferTask, straightThroughStringTask: () => straightThroughStringTask }); -function adhocExecTask(parser3) { +function adhocExecTask(parser4) { return { commands: EMPTY_COMMANDS, format: "empty", - parser: parser3 + parser: parser4 }; } function configurationErrorTask(error) { @@ -51127,6 +52403,47 @@ var init_checkout = __esm2({ init_task(); } }); +function countObjectsResponse() { + return { + count: 0, + garbage: 0, + inPack: 0, + packs: 0, + prunePackable: 0, + size: 0, + sizeGarbage: 0, + sizePack: 0 + }; +} +function count_objects_default() { + return { + countObjects() { + return this._runTask({ + commands: ["count-objects", "--verbose"], + format: "utf-8", + parser(stdOut) { + return parseStringResponse(countObjectsResponse(), [parser2], stdOut); + } + }); + } + }; +} +var parser2; +var init_count_objects = __esm2({ + "src/lib/tasks/count-objects.ts"() { + "use strict"; + init_utils(); + parser2 = new LineParser( + /([a-z-]+): (\d+)$/, + (result, [key, value]) => { + const property = asCamelCase(key); + if (result.hasOwnProperty(property)) { + result[property] = asNumber(value); + } + } + ); + } +}); function parseCommitResult(stdOut) { const result = { author: null, @@ -51353,8 +52670,8 @@ var init_DiffSummary = __esm2({ } }); function getDiffParser(format = "") { - const parser3 = diffSummaryParsers[format]; - return (stdOut) => parseStringResponse(new DiffSummary(), parser3, stdOut, false); + const parser4 = diffSummaryParsers[format]; + return (stdOut) => parseStringResponse(new DiffSummary(), parser4, stdOut, false); } var statParser; var numStatParser; @@ -51446,15 +52763,17 @@ var init_parse_diff_summary = __esm2({ nameStatusParser = [ new LineParser( /([ACDMRTUXB])([0-9]{0,3})\t(.[^\t]*)(\t(.[^\t]*))?$/, - (result, [status, _similarity, from, _to, to]) => { + (result, [status, similarity, from, _to, to]) => { result.changed++; result.files.push({ file: to != null ? 
to : from, changes: 0, - status: orVoid(isDiffNameStatus(status) && status), insertions: 0, deletions: 0, - binary: false + binary: false, + status: orVoid(isDiffNameStatus(status) && status), + from: orVoid(!!to && from !== to && from), + similarity: asNumber(similarity) }); } ) @@ -51496,15 +52815,12 @@ function createListLogSummaryParser(splitter = SPLITTER, fields = defaultFieldNa const parseDiffResult = getDiffParser(logFormat); return function(stdOut) { const all = toLinesWithContent( - stdOut, - true, + stdOut.trim(), + false, START_BOUNDARY ).map(function(item) { - const lineDetail = item.trim().split(COMMIT_BOUNDARY); - const listLogLine = lineBuilder( - lineDetail[0].trim().split(splitter), - fields - ); + const lineDetail = item.split(COMMIT_BOUNDARY); + const listLogLine = lineBuilder(lineDetail[0].split(splitter), fields); if (lineDetail.length > 1 && !!lineDetail[1].trim()) { listLogLine.diff = parseDiffResult(lineDetail[1]); } @@ -51626,11 +52942,11 @@ function parseLogOptions(opt = {}, customArgs = []) { }; } function logTask(splitter, fields, customArgs) { - const parser3 = createListLogSummaryParser(splitter, fields, logFormatFromCommand(customArgs)); + const parser4 = createListLogSummaryParser(splitter, fields, logFormatFromCommand(customArgs)); return { commands: ["log", ...customArgs], format: "utf-8", - parser: parser3 + parser: parser4 }; } function log_default() { @@ -51965,11 +53281,11 @@ function mergeTask(customArgs) { commands: ["merge", ...customArgs], format: "utf-8", parser(stdOut, stdErr) { - const merge2 = parseMergeResult(stdOut, stdErr); - if (merge2.failed) { - throw new GitResponseError(merge2); + const merge3 = parseMergeResult(stdOut, stdErr); + if (merge3.failed) { + throw new GitResponseError(merge3); } - return merge2; + return merge3; } }; } @@ -52117,16 +53433,16 @@ var FileStatusSummary; var init_FileStatusSummary = __esm2({ "src/lib/responses/FileStatusSummary.ts"() { "use strict"; - fromPathRegex = /^(.+) -> (.+)$/; + fromPathRegex = /^(.+)\0(.+)$/; FileStatusSummary = class { constructor(path2, index, working_dir) { this.path = path2; this.index = index; this.working_dir = working_dir; - if ("R" === index + working_dir) { + if (index === "R" || working_dir === "R") { const detail = fromPathRegex.exec(path2) || [null, path2, path2]; - this.from = detail[1] || ""; - this.path = detail[2] || ""; + this.from = detail[2] || ""; + this.path = detail[1] || ""; } } }; @@ -52139,11 +53455,11 @@ function renamedFile(line) { to }; } -function parser2(indexX, indexY, handler2) { - return [`${indexX}${indexY}`, handler2]; +function parser3(indexX, indexY, handler3) { + return [`${indexX}${indexY}`, handler3]; } function conflicts(indexX, ...indexY) { - return indexY.map((y) => parser2(indexX, y, (result, file) => append(result.conflicted, file))); + return indexY.map((y) => parser3(indexX, y, (result, file) => append(result.conflicted, file))); } function splitLine(result, lineStr) { const trimmed2 = lineStr.trim(); @@ -52157,12 +53473,12 @@ function splitLine(result, lineStr) { } function data(index, workingDir, path2) { const raw = `${index}${workingDir}`; - const handler2 = parsers6.get(raw); - if (handler2) { - handler2(result, path2); + const handler3 = parsers6.get(raw); + if (handler3) { + handler3(result, path2); } if (raw !== "##" && raw !== "!!") { - result.files.push(new FileStatusSummary(path2.replace(/\0.+$/, ""), index, workingDir)); + result.files.push(new FileStatusSummary(path2, index, workingDir)); } } } @@ -52196,58 +53512,58 @@ var 
init_StatusSummary = __esm2({ } }; parsers6 = new Map([ - parser2( + parser3( " ", "A", (result, file) => append(result.created, file) ), - parser2( + parser3( " ", "D", (result, file) => append(result.deleted, file) ), - parser2( + parser3( " ", "M", (result, file) => append(result.modified, file) ), - parser2( + parser3( "A", " ", (result, file) => append(result.created, file) && append(result.staged, file) ), - parser2( + parser3( "A", "M", (result, file) => append(result.created, file) && append(result.staged, file) && append(result.modified, file) ), - parser2( + parser3( "D", " ", (result, file) => append(result.deleted, file) && append(result.staged, file) ), - parser2( + parser3( "M", " ", (result, file) => append(result.modified, file) && append(result.staged, file) ), - parser2( + parser3( "M", "M", (result, file) => append(result.modified, file) && append(result.staged, file) ), - parser2("R", " ", (result, file) => { + parser3("R", " ", (result, file) => { append(result.renamed, renamedFile(file)); }), - parser2("R", "M", (result, file) => { + parser3("R", "M", (result, file) => { const renamed = renamedFile(file); append(result.renamed, renamed); append(result.modified, renamed.to); }), - parser2("!", "!", (_result, _file) => { + parser3("!", "!", (_result, _file) => { append(_result.ignored = _result.ignored || [], _file); }), - parser2( + parser3( "?", "?", (result, file) => append(result.not_added, file) @@ -52358,7 +53674,7 @@ function versionResponse(major = 0, minor = 0, patch = 0, agent = "", installed function notInstalledResponse() { return versionResponse(0, 0, 0, "", false); } -function version_default2() { +function version_default() { return { version() { return this._runTask({ @@ -52383,7 +53699,7 @@ function versionParser(stdOut) { } var NOT_INSTALLED; var parsers7; -var init_version5 = __esm2({ +var init_version4 = __esm2({ "src/lib/tasks/version.ts"() { "use strict"; init_utils(); @@ -52418,6 +53734,7 @@ var init_simple_git_api = __esm2({ init_task_callback(); init_change_working_directory(); init_checkout(); + init_count_objects(); init_commit(); init_config(); init_first_commit(); @@ -52430,7 +53747,7 @@ var init_simple_git_api = __esm2({ init_show(); init_status(); init_task(); - init_version5(); + init_version4(); init_utils(); SimpleGitApi = class { constructor(_executor) { @@ -52504,8 +53821,8 @@ var init_simple_git_api = __esm2({ trailingFunctionArgument(arguments, false) ); } - outputHandler(handler2) { - this._executor.outputHandler = handler2; + outputHandler(handler3) { + this._executor.outputHandler = handler3; return this; } push() { @@ -52536,11 +53853,12 @@ var init_simple_git_api = __esm2({ checkout_default(), commit_default(), config_default(), + count_objects_default(), first_commit_default(), grep_default(), log_default(), show_default(), - version_default2() + version_default() ); } }); @@ -52766,11 +54084,11 @@ function branchTask(customArgs) { }; } function branchLocalTask() { - const parser3 = parseBranchSummary; + const parser4 = parseBranchSummary; return { format: "utf-8", commands: ["branch", "-v"], - parser: parser3 + parser: parser4 }; } function deleteBranchesTask(branches, forceDelete = false) { @@ -53039,8 +54357,8 @@ function parseGetRemotesVerbose(text) { }); return Object.values(remotes); } -function forEach(text, handler2) { - forEachLineWithContent(text, (line) => handler2(line.split(/\s+/))); +function forEach(text, handler3) { + forEachLineWithContent(text, (line) => handler3(line.split(/\s+/))); } var 
init_GetRemoteSummary = __esm2({ "src/lib/responses/GetRemoteSummary.ts"() { @@ -53101,7 +54419,7 @@ __export2(stash_list_exports, { function stashListTask(opt = {}, customArgs) { const options = parseLogOptions(opt); const commands = ["stash", "list", ...options.commands, ...customArgs]; - const parser3 = createListLogSummaryParser( + const parser4 = createListLogSummaryParser( options.splitter, options.fields, logFormatFromCommand(commands) @@ -53109,7 +54427,7 @@ function stashListTask(opt = {}, customArgs) { return validateLogFormatConfig(commands) || { commands, format: "utf-8", - parser: parser3 + parser: parser4 }; } var init_stash_list = __esm2({ @@ -53543,20 +54861,20 @@ var require_git = __commonJS2({ return this._catFile("buffer", arguments); }; Git2.prototype._catFile = function(format, args) { - var handler2 = trailingFunctionArgument2(args); + var handler3 = trailingFunctionArgument2(args); var command = ["cat-file"]; var options = args[0]; if (typeof options === "string") { return this._runTask( configurationErrorTask2("Git.catFile: options must be supplied as an array of strings"), - handler2 + handler3 ); } if (Array.isArray(options)) { command.push.apply(command, options); } const task = format === "buffer" ? straightThroughBufferTask2(command) : straightThroughStringTask2(command); - return this._runTask(task, handler2); + return this._runTask(task, handler3); }; Git2.prototype.diff = function(options, then) { const task = filterString2(options) ? configurationErrorTask2( @@ -54105,66 +55423,65 @@ var parseClass = (glob, position) => { let negate = false; let endPos = pos; let rangeStart = ""; - WHILE: - while (i2 < glob.length) { - const c = glob.charAt(i2); - if ((c === "!" || c === "^") && i2 === pos + 1) { - negate = true; + WHILE: while (i2 < glob.length) { + const c = glob.charAt(i2); + if ((c === "!" 
|| c === "^") && i2 === pos + 1) { + negate = true; + i2++; + continue; + } + if (c === "]" && sawStart && !escaping) { + endPos = i2 + 1; + break; + } + sawStart = true; + if (c === "\\") { + if (!escaping) { + escaping = true; i2++; continue; } - if (c === "]" && sawStart && !escaping) { - endPos = i2 + 1; - break; - } - sawStart = true; - if (c === "\\") { - if (!escaping) { - escaping = true; - i2++; - continue; - } - } - if (c === "[" && !escaping) { - for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { - if (glob.startsWith(cls, i2)) { - if (rangeStart) { - return ["$.", false, glob.length - pos, true]; - } - i2 += cls.length; - if (neg) - negs.push(unip); - else - ranges.push(unip); - uflag = uflag || u; - continue WHILE; + } + if (c === "[" && !escaping) { + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i2)) { + if (rangeStart) { + return ["$.", false, glob.length - pos, true]; } + i2 += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; } } - escaping = false; - if (rangeStart) { - if (c > rangeStart) { - ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c)); - } else if (c === rangeStart) { - ranges.push(braceEscape(c)); - } - rangeStart = ""; - i2++; - continue; - } - if (glob.startsWith("-]", i2 + 1)) { - ranges.push(braceEscape(c + "-")); - i2 += 2; - continue; - } - if (glob.startsWith("-", i2 + 1)) { - rangeStart = c; - i2 += 2; - continue; + } + escaping = false; + if (rangeStart) { + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + "-" + braceEscape(c)); + } else if (c === rangeStart) { + ranges.push(braceEscape(c)); } - ranges.push(braceEscape(c)); + rangeStart = ""; i2++; + continue; + } + if (glob.startsWith("-]", i2 + 1)) { + ranges.push(braceEscape(c + "-")); + i2 += 2; + continue; + } + if (glob.startsWith("-", i2 + 1)) { + rangeStart = c; + i2 += 2; + continue; } + ranges.push(braceEscape(c)); + i2++; + } if (endPos < i2) { return ["", false, 0, false]; } @@ -54831,948 +56148,4137 @@ var Minimatch = class { if (this.windowsPathsNoEscape) { this.pattern = this.pattern.replace(/\\/g, "/"); } - this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; - this.regexp = null; - this.negate = false; - this.nonegate = !!options.nonegate; - this.comment = false; - this.empty = false; - this.partial = !!options.partial; - this.nocase = !!this.options.nocase; - this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? options.windowsNoMagicRoot : !!(this.isWindows && this.nocase); - this.globSet = []; - this.globParts = []; - this.set = []; - this.make(); + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = options.windowsNoMagicRoot !== void 0 ? 
options.windowsNoMagicRoot : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== "string") + return true; + } + } + return false; + } + debug(..._) { + } + make() { + const pattern = this.pattern; + const options = this.options; + if (!options.nocomment && pattern.charAt(0) === "#") { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + this.parseNegate(); + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + const rawGlobParts = this.globSet.map((s2) => this.slashSplit(s2)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + let set2 = this.globParts.map((s2, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + const isUNC = s2[0] === "" && s2[1] === "" && (s2[2] === "?" || !globMagic.test(s2[2])) && !globMagic.test(s2[3]); + const isDrive = /^[a-z]:/i.test(s2[0]); + if (isUNC) { + return [...s2.slice(0, 4), ...s2.slice(4).map((ss) => this.parse(ss))]; + } else if (isDrive) { + return [s2[0], ...s2.slice(1).map((ss) => this.parse(ss))]; + } + } + return s2.map((ss) => this.parse(ss)); + }); + this.debug(this.pattern, set2); + this.set = set2.filter((s2) => s2.indexOf(false) === -1); + if (this.isWindows) { + for (let i2 = 0; i2 < this.set.length; i2++) { + const p = this.set[i2]; + if (p[0] === "" && p[1] === "" && this.globParts[i2][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) { + p[2] = "?"; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + if (this.options.noglobstar) { + for (let i2 = 0; i2 < globParts.length; i2++) { + for (let j = 0; j < globParts[i2].length; j++) { + if (globParts[i2][j] === "**") { + globParts[i2][j] = "*"; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } else if (optimizationLevel >= 1) { + globParts = this.levelOneOptimize(globParts); + } else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map((parts) => { + let gs = -1; + while (-1 !== (gs = parts.indexOf("**", gs + 1))) { + let i2 = gs; + while (parts[i2 + 1] === "**") { + i2++; + } + if (i2 !== gs) { + parts.splice(gs, i2 - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map((parts) => { + parts = parts.reduce((set2, part) => { + const prev = set2[set2.length - 1]; + if (part === "**" && prev === "**") { + return set2; + } + if (part === "..") { + if (prev && prev !== ".." && prev !== "." && prev !== "**") { + set2.pop(); + return set2; + } + } + set2.push(part); + return set2; + }, []); + return parts.length === 0 ? 
[""] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + if (!this.preserveMultipleSlashes) { + for (let i2 = 1; i2 < parts.length - 1; i2++) { + const p = parts[i2]; + if (i2 === 1 && p === "" && parts[0] === "") + continue; + if (p === "." || p === "") { + didSomething = true; + parts.splice(i2, 1); + i2--; + } + } + if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) { + didSomething = true; + parts.pop(); + } + } + let dd = 0; + while (-1 !== (dd = parts.indexOf("..", dd + 1))) { + const p = parts[dd - 1]; + if (p && p !== "." && p !== ".." && p !== "**") { + didSomething = true; + parts.splice(dd - 1, 2); + dd -= 2; + } + } + } while (didSomething); + return parts.length === 0 ? [""] : parts; + } + // First phase: single-pattern processing + //
 <pre> is 1 or more portions
+  // <rest> is 1 or more portions
+  // <p> is any portion other than ., .., '', or **
+  // <e> is . or ''
+  //
+  // **/.. is *brutal* for filesystem walking performance, because
+  // it effectively resets the recursive walk each time it occurs,
+  // and ** cannot be reduced out by a .. pattern part like a regexp
+  // or most strings (other than .., ., and '') can be.
+  //
+  // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+  // <pre>/<e>/<rest> -> <pre>/<rest>
+  // <pre>/<p>/../<rest> -> <pre>/<rest>
+  // **/**/<rest> -> **/<rest>
+  //
+  // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+  // this WOULD be allowed if ** did follow symlinks, or * didn't
+  firstPhasePreProcess(globParts) {
+    let didSomething = false;
+    do {
+      didSomething = false;
+      for (let parts of globParts) {
+        let gs = -1;
+        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
+          let gss = gs;
+          while (parts[gss + 1] === "**") {
+            gss++;
+          }
+          if (gss > gs) {
+            parts.splice(gs + 1, gss - gs);
+          }
+          let next = parts[gs + 1];
+          const p = parts[gs + 2];
+          const p2 = parts[gs + 3];
+          if (next !== "..")
+            continue;
+          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
+            continue;
+          }
+          didSomething = true;
+          parts.splice(gs, 1);
+          const other = parts.slice(0);
+          other[gs] = "**";
+          globParts.push(other);
+          gs--;
+        }
+        if (!this.preserveMultipleSlashes) {
+          for (let i2 = 1; i2 < parts.length - 1; i2++) {
+            const p = parts[i2];
+            if (i2 === 1 && p === "" && parts[0] === "")
+              continue;
+            if (p === "." || p === "") {
+              didSomething = true;
+              parts.splice(i2, 1);
+              i2--;
+            }
+          }
+          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
+            didSomething = true;
+            parts.pop();
+          }
+        }
+        let dd = 0;
+        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
+          const p = parts[dd - 1];
+          if (p && p !== "." && p !== ".." && p !== "**") {
+            didSomething = true;
+            const needDot = dd === 1 && parts[dd + 1] === "**";
+            const splin = needDot ? ["."] : [];
+            parts.splice(dd - 1, 2, ...splin);
+            if (parts.length === 0)
+              parts.push("");
+            dd -= 2;
+          }
+        }
+      }
+    } while (didSomething);
+    return globParts;
+  }
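
A minimal, illustrative check of the collapse rules described in the comments above, using minimatch's public API. This is not part of the diff; it assumes the minimatch package is installed as a regular dependency:

const { minimatch } = require("minimatch");

// Adjacent globstars are collapsed during preprocessing, so both spellings
// accept the same path.
console.log(minimatch("src/lib/tasks/status.ts", "src/**/**/*.ts")); // true
console.log(minimatch("src/lib/tasks/status.ts", "src/**/*.ts"));    // true

// A ".." that follows a plain segment is resolved away, so the rewritten
// pattern matches the shorter path.
console.log(minimatch("a/b.txt", "a/x/../b.txt")); // true
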
+  // second phase: multi-pattern dedupes
+  // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+  // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+  // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+  //
+  // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+  // ^-- not valid because ** doens't follow symlinks
+  secondPhasePreProcess(globParts) {
+    for (let i2 = 0; i2 < globParts.length - 1; i2++) {
+      for (let j = i2 + 1; j < globParts.length; j++) {
+        const matched = this.partsMatch(globParts[i2], globParts[j], !this.preserveMultipleSlashes);
+        if (matched) {
+          globParts[i2] = [];
+          globParts[j] = matched;
+          break;
+        }
+      }
+    }
+    return globParts.filter((gs) => gs.length);
+  }
+  partsMatch(a, b, emptyGSMatch = false) {
+    let ai = 0;
+    let bi = 0;
+    let result = [];
+    let which = "";
+    while (ai < a.length && bi < b.length) {
+      if (a[ai] === b[bi]) {
+        result.push(which === "b" ? b[bi] : a[ai]);
+        ai++;
+        bi++;
+      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
+        result.push(a[ai]);
+        ai++;
+      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
+        result.push(b[bi]);
+        bi++;
+      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
+        if (which === "b")
+          return false;
+        which = "a";
+        result.push(a[ai]);
+        ai++;
+        bi++;
+      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
+        if (which === "a")
+          return false;
+        which = "b";
+        result.push(b[bi]);
+        ai++;
+        bi++;
+      } else {
+        return false;
+      }
+    }
+    return a.length === b.length && result;
+  }
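// The dedupe above relies on subsumption: a path matched by a literal portion is also matched
// by "*" in the same position (dotfiles aside), so only the "*" variant needs to be kept.
// A small check of that assumption via minimatch's public API, with placeholder paths:
import { minimatch } from "minimatch";
const files = ["src/utils/index.ts", "src/lib/index.ts", "test/index.ts"];
for (const f of files) {
  const specific = minimatch(f, "src/utils/index.ts");
  const starred = minimatch(f, "src/*/index.ts");
  console.log(f, { specific, starred }); // specific === true implies starred === true
}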
+  parseNegate() {
+    if (this.nonegate)
+      return;
+    const pattern = this.pattern;
+    let negate = false;
+    let negateOffset = 0;
+    for (let i2 = 0; i2 < pattern.length && pattern.charAt(i2) === "!"; i2++) {
+      negate = !negate;
+      negateOffset++;
+    }
+    if (negateOffset)
+      this.pattern = pattern.slice(negateOffset);
+    this.negate = negate;
+  }
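// parseNegate strips leading "!" characters and flips `negate` once per "!", so an even
// number of them cancels out. A quick illustration using the public minimatch API:
import { minimatch } from "minimatch";
console.log(minimatch("foo.js", "*.js"));   // true
console.log(minimatch("foo.js", "!*.js"));  // false - a single "!" negates the pattern
console.log(minimatch("foo.ts", "!*.js"));  // true  - anything not matching "*.js"
console.log(minimatch("foo.js", "!!*.js")); // true  - double negation cancels out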
+  // set partial to true to test if, for example,
+  // "/a/b" matches the start of "/*/b/*/d"
+  // Partial means, if you run out of file before you run
+  // out of pattern, then that's fine, as long as all
+  // the parts match.
+  matchOne(file, pattern, partial = false) {
+    const options = this.options;
+    if (this.isWindows) {
+      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
+      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
+      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
+      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
+      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
+      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
+      if (typeof fdi === "number" && typeof pdi === "number") {
+        const [fd, pd] = [file[fdi], pattern[pdi]];
+        if (fd.toLowerCase() === pd.toLowerCase()) {
+          pattern[pdi] = fd;
+          if (pdi > fdi) {
+            pattern = pattern.slice(pdi);
+          } else if (fdi > pdi) {
+            file = file.slice(fdi);
+          }
+        }
+      }
+    }
+    const { optimizationLevel = 1 } = this.options;
+    if (optimizationLevel >= 2) {
+      file = this.levelTwoFileOptimize(file);
+    }
+    this.debug("matchOne", this, { file, pattern });
+    this.debug("matchOne", file.length, pattern.length);
+    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+      this.debug("matchOne loop");
+      var p = pattern[pi];
+      var f3 = file[fi];
+      this.debug(pattern, p, f3);
+      if (p === false) {
+        return false;
+      }
+      if (p === GLOBSTAR) {
+        this.debug("GLOBSTAR", [pattern, p, f3]);
+        var fr = fi;
+        var pr = pi + 1;
+        if (pr === pl) {
+          this.debug("** at the end");
+          for (; fi < fl; fi++) {
+            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
+              return false;
+          }
+          return true;
+        }
+        while (fr < fl) {
+          var swallowee = file[fr];
+          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
+          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+            this.debug("globstar found match!", fr, fl, swallowee);
+            return true;
+          } else {
+            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
+              this.debug("dot detected!", file, fr, pattern, pr);
+              break;
+            }
+            this.debug("globstar swallow a segment, and continue");
+            fr++;
+          }
+        }
+        if (partial) {
+          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
+          if (fr === fl) {
+            return true;
+          }
+        }
+        return false;
+      }
+      let hit;
+      if (typeof p === "string") {
+        hit = f3 === p;
+        this.debug("string match", p, f3, hit);
+      } else {
+        hit = p.test(f3);
+        this.debug("pattern match", p, f3, hit);
+      }
+      if (!hit)
+        return false;
+    }
+    if (fi === fl && pi === pl) {
+      return true;
+    } else if (fi === fl) {
+      return partial;
+    } else if (pi === pl) {
+      return fi === fl - 1 && file[fi] === "";
+    } else {
+      throw new Error("wtf?");
+    }
+  }
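// The `partial` flag handled above lets a directory prefix be accepted while walking:
// per the comment before matchOne, "/a/b" matches the start of "/*/b/*/d".
// A short sketch using the Minimatch class:
import { Minimatch } from "minimatch";
const mm = new Minimatch("/*/b/*/d");
console.log(mm.match("/a/b", true)); // true  - partial: ran out of path, not pattern
console.log(mm.match("/a/b"));       // false - a full match needs every portion
console.log(mm.match("/a/b/c/d"));   // true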
+  braceExpand() {
+    return braceExpand(this.pattern, this.options);
+  }
+  parse(pattern) {
+    assertValidPattern(pattern);
+    const options = this.options;
+    if (pattern === "**")
+      return GLOBSTAR;
+    if (pattern === "")
+      return "";
+    let m2;
+    let fastTest = null;
+    if (m2 = pattern.match(starRE)) {
+      fastTest = options.dot ? starTestDot : starTest;
+    } else if (m2 = pattern.match(starDotExtRE)) {
+      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m2[1]);
+    } else if (m2 = pattern.match(qmarksRE)) {
+      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m2);
+    } else if (m2 = pattern.match(starDotStarRE)) {
+      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+    } else if (m2 = pattern.match(dotStarRE)) {
+      fastTest = dotStarTest;
+    }
+    const re = AST.fromGlob(pattern, this.options).toMMPattern();
+    if (fastTest && typeof re === "object") {
+      Reflect.defineProperty(re, "test", { value: fastTest });
+    }
+    return re;
   }
-  hasMagic() {
-    if (this.options.magicalBraces && this.set.length > 1) {
-      return true;
+  makeRe() {
+    if (this.regexp || this.regexp === false)
+      return this.regexp;
+    const set2 = this.set;
+    if (!set2.length) {
+      this.regexp = false;
+      return this.regexp;
     }
-    for (const pattern of this.set) {
-      for (const part of pattern) {
-        if (typeof part !== "string")
-          return true;
-      }
+    const options = this.options;
+    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
+    const flags = new Set(options.nocase ? ["i"] : []);
+    let re = set2.map((pattern) => {
+      const pp = pattern.map((p) => {
+        if (p instanceof RegExp) {
+          for (const f3 of p.flags.split(""))
+            flags.add(f3);
+        }
+        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
+      });
+      pp.forEach((p, i2) => {
+        const next = pp[i2 + 1];
+        const prev = pp[i2 - 1];
+        if (p !== GLOBSTAR || prev === GLOBSTAR) {
+          return;
+        }
+        if (prev === void 0) {
+          if (next !== void 0 && next !== GLOBSTAR) {
+            pp[i2 + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
+          } else {
+            pp[i2] = twoStar;
+          }
+        } else if (next === void 0) {
+          pp[i2 - 1] = prev + "(?:\\/|" + twoStar + ")?";
+        } else if (next !== GLOBSTAR) {
+          pp[i2 - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
+          pp[i2 + 1] = GLOBSTAR;
+        }
+      });
+      return pp.filter((p) => p !== GLOBSTAR).join("/");
+    }).join("|");
+    const [open, close] = set2.length > 1 ? ["(?:", ")"] : ["", ""];
+    re = "^" + open + re + close + "$";
+    if (this.negate)
+      re = "^(?!" + re + ").+$";
+    try {
+      this.regexp = new RegExp(re, [...flags].join(""));
+    } catch (ex) {
+      this.regexp = false;
     }
-    return false;
+    return this.regexp;
   }
-  debug(..._) {
+  slashSplit(p) {
+    if (this.preserveMultipleSlashes) {
+      return p.split("/");
+    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+      return ["", ...p.split(/\/+/)];
+    } else {
+      return p.split(/\/+/);
+    }
   }
-  make() {
-    const pattern = this.pattern;
-    const options = this.options;
-    if (!options.nocomment && pattern.charAt(0) === "#") {
-      this.comment = true;
-      return;
+  match(f3, partial = this.partial) {
+    this.debug("match", f3, this.pattern);
+    if (this.comment) {
+      return false;
     }
-    if (!pattern) {
-      this.empty = true;
-      return;
+    if (this.empty) {
+      return f3 === "";
     }
-    this.parseNegate();
-    this.globSet = [...new Set(this.braceExpand())];
-    if (options.debug) {
-      this.debug = (...args) => console.error(...args);
+    if (f3 === "/" && partial) {
+      return true;
     }
-    this.debug(this.pattern, this.globSet);
-    const rawGlobParts = this.globSet.map((s2) => this.slashSplit(s2));
-    this.globParts = this.preprocess(rawGlobParts);
-    this.debug(this.pattern, this.globParts);
-    let set2 = this.globParts.map((s2, _, __) => {
-      if (this.isWindows && this.windowsNoMagicRoot) {
-        const isUNC = s2[0] === "" && s2[1] === "" && (s2[2] === "?" || !globMagic.test(s2[2])) && !globMagic.test(s2[3]);
-        const isDrive = /^[a-z]:/i.test(s2[0]);
-        if (isUNC) {
-          return [...s2.slice(0, 4), ...s2.slice(4).map((ss) => this.parse(ss))];
-        } else if (isDrive) {
-          return [s2[0], ...s2.slice(1).map((ss) => this.parse(ss))];
+    const options = this.options;
+    if (this.isWindows) {
+      f3 = f3.split("\\").join("/");
+    }
+    const ff = this.slashSplit(f3);
+    this.debug(this.pattern, "split", ff);
+    const set2 = this.set;
+    this.debug(this.pattern, "set", set2);
+    let filename = ff[ff.length - 1];
+    if (!filename) {
+      for (let i2 = ff.length - 2; !filename && i2 >= 0; i2--) {
+        filename = ff[i2];
+      }
+    }
+    for (let i2 = 0; i2 < set2.length; i2++) {
+      const pattern = set2[i2];
+      let file = ff;
+      if (options.matchBase && pattern.length === 1) {
+        file = [filename];
+      }
+      const hit = this.matchOne(file, pattern, partial);
+      if (hit) {
+        if (options.flipNegate) {
+          return true;
         }
+        return !this.negate;
       }
-      return s2.map((ss) => this.parse(ss));
-    });
-    this.debug(this.pattern, set2);
-    this.set = set2.filter((s2) => s2.indexOf(false) === -1);
-    if (this.isWindows) {
-      for (let i2 = 0; i2 < this.set.length; i2++) {
-        const p = this.set[i2];
-        if (p[0] === "" && p[1] === "" && this.globParts[i2][2] === "?" && typeof p[3] === "string" && /^[a-z]:$/i.test(p[3])) {
-          p[2] = "?";
+    }
+    if (options.flipNegate) {
+      return false;
+    }
+    return this.negate;
+  }
+  static defaults(def) {
+    return minimatch.defaults(def).Minimatch;
+  }
+};
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape2;
+
+// src/get-head-diff-since-review.ts
+var import_path = require("path");
+var getHeadDiffSinceReview = async ({
+  headSha,
+  reviewAssociatedSha,
+  baseBranch
+}) => {
+  const git = simpleGit();
+  const headAndReviewDiff = (await git.diffSummary([`${reviewAssociatedSha}..${headSha}`])).files.map(({ file }) => file);
+  const mainAndSecondCommitDiff = (await git.diffSummary([`origin/${baseBranch}...${headSha}`])).files.map(({ file }) => file);
+  const intersectionFiles = headAndReviewDiff.filter(
+    (file) => mainAndSecondCommitDiff.includes(file)
+  );
+  const diffFiles = [];
+  const fileRenameRegex = /{(.*) => (.*)}/;
+  await Promise.all(
+    intersectionFiles.map(async (file) => {
+      const fileRenameMatch = fileRenameRegex.exec(file);
+      if (fileRenameMatch) {
+        const path1 = (0, import_path.normalize)(
+          file.replace(fileRenameRegex, fileRenameMatch[1])
+        );
+        const path2 = (0, import_path.normalize)(
+          file.replace(fileRenameRegex, fileRenameMatch[2])
+        );
+        diffFiles.push(path1);
+        diffFiles.push(path2);
+        console.debug("Filename change:", path1, path2);
+        return;
+      }
+      const firstFileDiff = await git.diff([
+        `origin/${baseBranch}...${reviewAssociatedSha}`,
+        file
+      ]);
+      const secondFileDiff = await git.diff([
+        `origin/${baseBranch}...${headSha}`,
+        file
+      ]);
+      if (firstFileDiff.split("\n").filter((line) => line.startsWith("+") || line.startsWith("-")).join("\n") !== secondFileDiff.split("\n").filter((line) => line.startsWith("+") || line.startsWith("-")).join("\n")) {
+        diffFiles.push(file);
+      }
+    })
+  );
+  return diffFiles;
+};
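// Hedged usage sketch of getHeadDiffSinceReview (run inside an async context); the SHAs and
// branch are placeholders. The two-dot range (review..head) captures everything since the
// review, while the three-dot range against origin/<baseBranch> keeps only changes that
// belong to the pull request, so clean merges of the base branch don't count on their own.
const changedSinceReview = await getHeadDiffSinceReview({
  reviewAssociatedSha: "abc1234", // commit the review was submitted against (placeholder)
  headSha: "def5678",             // current head of the pull request (placeholder)
  baseBranch: "main"              // compared as origin/main...head (placeholder)
});
console.log(changedSinceReview);  // files whose +/- lines differ since the review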
+
+// src/group-reviews-by-commit.ts
+var chalk2 = new Chalk({ level: 2 });
+var groupReviewsByCommit = async ({
+  latestReviews,
+  headCommit,
+  baseBranch,
+  ignoreFiles
+}) => {
+  const codeowners = new import_codeowners.default();
+  const git = simpleGit();
+  const reviewsWithoutHistory = [];
+  const groupedReviewsByCommit = {};
+  await Promise.all(
+    latestReviews.map(async (review) => {
+      const reviewCommit = review.commit?.oid;
+      const basehead = `${reviewCommit}..${headCommit}`;
+      if (groupedReviewsByCommit[basehead]) {
+        groupedReviewsByCommit[basehead].reviews.push(review);
+        return;
+      }
+      try {
+        await git.catFile(["commit", reviewCommit]);
+      } catch {
+        console.log(
+          "\n",
+          chalk2.yellow(
+            `Commit '${reviewCommit}' doesn't exist in the history. This may be because it was overwritten by a force push or because it's outside of the checkout depth.`
+          ),
+          "\n",
+          chalk2.yellow(`Approval by ${review.author?.login} will be removed.`),
+          "\n"
+        );
+        reviewsWithoutHistory.push(review);
+        return;
+      }
+      const filesChangedByHeadCommit = await getHeadDiffSinceReview({
+        reviewAssociatedSha: reviewCommit,
+        headSha: headCommit,
+        baseBranch
+      });
+      (0, import_core.debug)(`Changes in ${basehead}:
+${filesChangedByHeadCommit.join("\n")}`);
+      groupedReviewsByCommit[basehead] = {
+        reviews: [review],
+        // filter out ignored files
+        filesChangedByHeadCommit: filesChangedByHeadCommit.filter(
+          (filename) => !ignoreFiles?.some(
+            (pattern) => minimatch(filename, pattern, { dot: true })
+          )
+        ).map((filename) => ({
+          owners: codeowners.getOwner(filename),
+          filename
+        }))
+      };
+    })
+  );
+  return { reviewsWithoutHistory, groupedReviewsByCommit };
+};
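// The ignoreFiles filter above passes `dot: true` so dotfile paths can be ignored as well.
// A small example with made-up patterns (the action's real patterns come from its inputs):
import { minimatch } from "minimatch";
const filename = ".github/workflows/ci.yml";
console.log(minimatch(filename, "**/*.yml"));                // false - "**" skips dotfiles by default
console.log(minimatch(filename, "**/*.yml", { dot: true })); // true  - ".github" is matched too
const ignoreFiles = ["**/*.yml"];                            // example pattern only
const ignored = ignoreFiles.some((pattern) => minimatch(filename, pattern, { dot: true }));
console.log(ignored); // true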
+
+// src/type-guards.ts
+function isPresent(value) {
+  return value != null;
+}
+
+// src/get-team-data.ts
+var getPullRequestQuery = (
+  /* GraphQL */
+  `
+  query getTeamData($orgLogin: String!, $teamSlug: String!, $cursor: String) {
+    organization(login: $orgLogin) {
+      team(slug: $teamSlug) {
+        members(first: 100, after: $cursor) {
+          nodes {
+            login
+          }
+          pageInfo {
+            hasNextPage
+            endCursor
+          }
         }
       }
     }
-    this.debug(this.pattern, this.set);
   }
-  // various transforms to equivalent pattern sets that are
-  // faster to process in a filesystem walk.  The goal is to
-  // eliminate what we can, and push all ** patterns as far
-  // to the right as possible, even if it increases the number
-  // of patterns that we have to process.
-  preprocess(globParts) {
-    if (this.options.noglobstar) {
-      for (let i2 = 0; i2 < globParts.length; i2++) {
-        for (let j = 0; j < globParts[i2].length; j++) {
-          if (globParts[i2][j] === "**") {
-            globParts[i2][j] = "*";
+`
+);
+var getTeamData = async ({
+  octokit,
+  organizationLogin,
+  teamSlug
+}) => {
+  const { organization } = await octokit.graphql.paginate(getPullRequestQuery, {
+    orgLogin: organizationLogin,
+    teamSlug
+  }).catch((e2) => {
+    console.error(
+      "Something went wrong during fetching team members data. Make sure that the github token has read access to organization members."
+    );
+    throw e2;
+  });
+  if (!organization) {
+    throw new Error(`Organization ${organizationLogin} could not be found!`);
+  }
+  if (!organization.team) {
+    throw new Error(
+      `Team ${teamSlug} could not be found in ${organizationLogin} organization!`
+    );
+  }
+  if (!organization.team.members.nodes) {
+    throw new Error(
+      `Cannot read members of ${teamSlug} team in ${organizationLogin} organization!`
+    );
+  }
+  return {
+    members: organization.team.members.nodes.filter(isPresent).map(({ login }) => login)
+  };
+};
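// Hedged usage sketch of getTeamData (inside an async context); the organization and team are
// placeholders, and `octokit` must carry the GraphQL pagination plugin used above. The token
// needs read access to organization members, as the error message notes.
const { members } = await getTeamData({
  octokit,
  organizationLogin: "my-org", // placeholder
  teamSlug: "my-team"          // placeholder
});
console.log(members); // e.g. ["alice", "bob"]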
+
+// src/calculate-reviews-to-dismiss.ts
+var calculateReviewToDismiss = async ({
+  latestReviews,
+  headCommit,
+  baseBranch,
+  ignoreFiles,
+  octokit
+}) => {
+  const { groupedReviewsByCommit, reviewsWithoutHistory } = await groupReviewsByCommit({
+    latestReviews,
+    headCommit,
+    baseBranch,
+    ignoreFiles
+  });
+  const filesWithoutOwner = [
+    ...new Set(
+      Object.values(groupedReviewsByCommit).map(
+        ({ filesChangedByHeadCommit }) => filesChangedByHeadCommit.filter(({ owners }) => !owners.length).map(({ filename }) => filename)
+      ).flat()
+    )
+  ];
+  if (filesWithoutOwner.length) {
+    return {
+      filesWithoutOwner
+    };
+  }
+  const reviewsToDismiss = [...reviewsWithoutHistory];
+  const teamMembers = {};
+  for (const { filesChangedByHeadCommit, reviews } of Object.values(
+    groupedReviewsByCommit
+  )) {
+    const changedFilesOwners = [
+      ...new Set(filesChangedByHeadCommit.map(({ owners }) => owners).flat())
+    ];
+    const changedFilesTeamOwners = changedFilesOwners.filter((owner) => owner.includes("/")).map((teamOwnership) => teamOwnership.replace("@", ""));
+    await Promise.all(
+      changedFilesTeamOwners.filter((team) => !Object.keys(teamMembers).includes(team)).map(async (team) => {
+        const teamHandle = team.split("/");
+        teamMembers[team] = (await getTeamData({
+          octokit,
+          organizationLogin: teamHandle[0],
+          teamSlug: teamHandle[1]
+        })).members;
+      })
+    );
+    await Promise.all(
+      reviews.map((review) => {
+        const { author } = review;
+        let isDismissed = false;
+        console.log(
+          `Considering review from ${author?.login} and file changes between ${review.commit?.oid} (reviewed commit) and ${headCommit} (head commit)`
+        );
+        if (review.commit?.oid === headCommit) {
+          console.log(
+            "The review commit sha is the same as head commit sha. That means that there were no changes since the review, or the base branch was merged/rebased cleanly."
+          );
+        } else if (!author || // if review author is mentioned directly as an owner of changed files, dismiss their review
+        author.login && changedFilesOwners.includes(`@${author.login}`)) {
+          const changedFilesOwnedByReviewAuthor = filesChangedByHeadCommit.filter(
+            ({ owners }) => !!owners.find((owner) => owner === `@${author?.login}`)
+          ).map(({ filename }) => filename);
+          console.log(
+            `Changed files owned by ${author?.login}:
+${changedFilesOwnedByReviewAuthor.join(
+              "\n"
+            )}`
+          );
+          reviewsToDismiss.push(review);
+          isDismissed = true;
+        } else if (!changedFilesTeamOwners.length) {
+          console.log(
+            `Review author ${author?.login} doesn't own any of the changed files, nor is a member of any team owning changed files.
+The review from ${author?.login} won't be dismissed.
+`
+          );
+        } else {
+          for (const teamOwnership of changedFilesTeamOwners) {
+            if (teamMembers[teamOwnership]?.includes(author.login)) {
+              const changedFilesOwnedByAuthorsTeam = filesChangedByHeadCommit.filter(
+                ({ owners }) => !!owners.find((owner) => owner === `@${teamOwnership}`)
+              ).map(({ filename }) => filename);
+              console.log(
+                `Review author ${author?.login} is a member of the ${teamOwnership} team, which owns the following changed files:
+${changedFilesOwnedByAuthorsTeam.join(
+                  "\n"
+                )}`
+              );
+              reviewsToDismiss.push(review);
+              isDismissed = true;
+            } else {
+              (0, import_core2.debug)(
+                `User ${author.login} is not a member of the ${teamOwnership} team`
+              );
+            }
+          }
+        }
+        if (isDismissed) {
+          console.log(`The review from ${author?.login} will be dismissed.
+`);
+        } else {
+          console.log(
+            `Review author ${author?.login} doesn't own any of the changed files, nor is a member of any team owning changed files.
+The review from ${author?.login} won't be dismissed.
+`
+          );
+        }
+      })
+    );
+  }
+  return {
+    reviewsToDismiss,
+    reviewsWithoutHistory
+  };
+};
+
+// src/dismiss-reviews.ts
+var requestReviewsMutation = (
+  /* GraphQL */
+  `
+  mutation dismissReview($message: String!, $pullRequestReviewId: ID!) {
+    dismissPullRequestReview(
+      input: { message: $message, pullRequestReviewId: $pullRequestReviewId }
+    ) {
+      clientMutationId
+    }
+  }
+`
+);
+var dismissReviews = async ({
+  octokit,
+  message,
+  reviewsToDismiss
+}) => Promise.all(
+  reviewsToDismiss.map(async ({ id: pullRequestReviewId, author }) => {
+    try {
+      await octokit.graphql(requestReviewsMutation, {
+        message,
+        pullRequestReviewId
+      });
+    } catch {
+      console.error(`Failed to dismiss review from ${author?.login}.`);
+    }
+  })
+);
+
+// src/get-pr-data.ts
+var getPullRequestQuery2 = (
+  /* GraphQL */
+  `
+  query getPrData($nodeId: ID!, $cursor: String) {
+    node(id: $nodeId) {
+      __typename
+      ... on PullRequest {
+        commits(last: 1) {
+          nodes {
+            commit {
+              oid
+              committedDate
+            }
+          }
+        }
+        latestOpinionatedReviews(first: 100, after: $cursor) {
+          nodes {
+            id
+            state
+            commit {
+              oid
+            }
+            author {
+              __typename
+              login
+              ... on User {
+                id
+              }
+            }
+          }
+          pageInfo {
+            hasNextPage
+            endCursor
           }
         }
       }
     }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      globParts = this.firstPhasePreProcess(globParts);
-      globParts = this.secondPhasePreProcess(globParts);
-    } else if (optimizationLevel >= 1) {
-      globParts = this.levelOneOptimize(globParts);
-    } else {
-      globParts = this.adjascentGlobstarOptimize(globParts);
+  }
+`
+);
+var getPrData = async ({
+  octokit,
+  pullRequestId
+}) => {
+  const { node: pullRequest } = await octokit.graphql.paginate(
+    getPullRequestQuery2,
+    {
+      nodeId: pullRequestId
     }
-    return globParts;
+  );
+  if (!pullRequest || pullRequest.__typename !== "PullRequest") {
+    throw new Error("The pull request could not be found!");
   }
-  // just get rid of adjascent ** portions
-  adjascentGlobstarOptimize(globParts) {
-    return globParts.map((parts) => {
-      let gs = -1;
-      while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-        let i2 = gs;
-        while (parts[i2 + 1] === "**") {
-          i2++;
-        }
-        if (i2 !== gs) {
-          parts.splice(gs, i2 - gs);
-        }
-      }
-      return parts;
-    });
+  if (!pullRequest.commits.nodes) {
+    throw new Error("Pull request commits are missing!");
   }
-  // get rid of adjascent ** and resolve .. portions
-  levelOneOptimize(globParts) {
-    return globParts.map((parts) => {
-      parts = parts.reduce((set2, part) => {
-        const prev = set2[set2.length - 1];
-        if (part === "**" && prev === "**") {
-          return set2;
-        }
-        if (part === "..") {
-          if (prev && prev !== ".." && prev !== "." && prev !== "**") {
-            set2.pop();
-            return set2;
-          }
-        }
-        set2.push(part);
-        return set2;
-      }, []);
-      return parts.length === 0 ? [""] : parts;
-    });
+  return {
+    commits: pullRequest.commits.nodes.filter(isPresent),
+    latestReviews: pullRequest.latestOpinionatedReviews?.nodes?.filter(isPresent) ?? []
+  };
+};
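// Taken together, the helpers above form the action's core flow: fetch the PR head and its
// latest reviews, decide which approvals are stale, then dismiss them. Hedged sketch with
// placeholder inputs (the real action reads them from its event payload and inputs):
const { commits, latestReviews } = await getPrData({
  octokit,
  pullRequestId: "PR_nodeId" // GraphQL node id of the pull request (placeholder)
});
const headCommit = commits[0].commit.oid;
const result = await calculateReviewToDismiss({
  latestReviews,
  headCommit,
  baseBranch: "main",       // placeholder
  ignoreFiles: ["**/*.md"], // placeholder
  octokit
});
if ("reviewsToDismiss" in result && result.reviewsToDismiss.length) {
  await dismissReviews({
    octokit,
    reviewsToDismiss: result.reviewsToDismiss,
    message: "Stale approval: owned files changed since this review." // placeholder
  });
}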
+
+// node_modules/universal-user-agent/index.js
+function getUserAgent2() {
+  if (typeof navigator === "object" && "userAgent" in navigator) {
+    return navigator.userAgent;
   }
-  levelTwoFileOptimize(parts) {
-    if (!Array.isArray(parts)) {
-      parts = this.slashSplit(parts);
+  if (typeof process === "object" && process.version !== void 0) {
+    return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;
+  }
+  return "";
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/core/node_modules/before-after-hook/lib/register.js
+function register(state, name, method, options) {
+  if (typeof method !== "function") {
+    throw new Error("method for before hook must be a function");
+  }
+  if (!options) {
+    options = {};
+  }
+  if (Array.isArray(name)) {
+    return name.reverse().reduce((callback, name2) => {
+      return register.bind(null, state, name2, callback, options);
+    }, method)();
+  }
+  return Promise.resolve().then(() => {
+    if (!state.registry[name]) {
+      return method(options);
     }
-    let didSomething = false;
-    do {
-      didSomething = false;
-      if (!this.preserveMultipleSlashes) {
-        for (let i2 = 1; i2 < parts.length - 1; i2++) {
-          const p = parts[i2];
-          if (i2 === 1 && p === "" && parts[0] === "")
-            continue;
-          if (p === "." || p === "") {
-            didSomething = true;
-            parts.splice(i2, 1);
-            i2--;
-          }
-        }
-        if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-          didSomething = true;
-          parts.pop();
-        }
-      }
-      let dd = 0;
-      while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-        const p = parts[dd - 1];
-        if (p && p !== "." && p !== ".." && p !== "**") {
-          didSomething = true;
-          parts.splice(dd - 1, 2);
-          dd -= 2;
-        }
-      }
-    } while (didSomething);
-    return parts.length === 0 ? [""] : parts;
+    return state.registry[name].reduce((method2, registered) => {
+      return registered.hook.bind(null, method2, options);
+    }, method)();
+  });
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/core/node_modules/before-after-hook/lib/add.js
+function addHook(state, kind, name, hook4) {
+  const orig = hook4;
+  if (!state.registry[name]) {
+    state.registry[name] = [];
   }
-  // First phase: single-pattern processing
-  // <pre> is 1 or more portions
-  // <rest> is 1 or more portions
-  // <p> is any portion other than ., .., '', or **
-  // <e> is . or ''
-  //
-  // **/.. is *brutal* for filesystem walking performance, because
-  // it effectively resets the recursive walk each time it occurs,
-  // and ** cannot be reduced out by a .. pattern part like a regexp
-  // or most strings (other than .., ., and '') can be.
-  //
-  // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-  // <pre>/<e>/<rest> -> <pre>/<rest>
-  // <pre>/<p>/../<rest> -> <pre>/<rest>
-  // **/**/<rest> -> **/<rest>
-  //
-  // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-  // this WOULD be allowed if ** did follow symlinks, or * didn't
-  firstPhasePreProcess(globParts) {
-    let didSomething = false;
-    do {
-      didSomething = false;
-      for (let parts of globParts) {
-        let gs = -1;
-        while (-1 !== (gs = parts.indexOf("**", gs + 1))) {
-          let gss = gs;
-          while (parts[gss + 1] === "**") {
-            gss++;
-          }
-          if (gss > gs) {
-            parts.splice(gs + 1, gss - gs);
-          }
-          let next = parts[gs + 1];
-          const p = parts[gs + 2];
-          const p2 = parts[gs + 3];
-          if (next !== "..")
-            continue;
-          if (!p || p === "." || p === ".." || !p2 || p2 === "." || p2 === "..") {
-            continue;
-          }
-          didSomething = true;
-          parts.splice(gs, 1);
-          const other = parts.slice(0);
-          other[gs] = "**";
-          globParts.push(other);
-          gs--;
-        }
-        if (!this.preserveMultipleSlashes) {
-          for (let i2 = 1; i2 < parts.length - 1; i2++) {
-            const p = parts[i2];
-            if (i2 === 1 && p === "" && parts[0] === "")
-              continue;
-            if (p === "." || p === "") {
-              didSomething = true;
-              parts.splice(i2, 1);
-              i2--;
-            }
-          }
-          if (parts[0] === "." && parts.length === 2 && (parts[1] === "." || parts[1] === "")) {
-            didSomething = true;
-            parts.pop();
-          }
-        }
-        let dd = 0;
-        while (-1 !== (dd = parts.indexOf("..", dd + 1))) {
-          const p = parts[dd - 1];
-          if (p && p !== "." && p !== ".." && p !== "**") {
-            didSomething = true;
-            const needDot = dd === 1 && parts[dd + 1] === "**";
-            const splin = needDot ? ["."] : [];
-            parts.splice(dd - 1, 2, ...splin);
-            if (parts.length === 0)
-              parts.push("");
-            dd -= 2;
-          }
-        }
-      }
-    } while (didSomething);
-    return globParts;
+  if (kind === "before") {
+    hook4 = (method, options) => {
+      return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options));
+    };
   }
-  // second phase: multi-pattern dedupes
-  // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-  // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-  // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-  //
-  // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-  // ^-- not valid because ** doesn't follow symlinks
-  secondPhasePreProcess(globParts) {
-    for (let i2 = 0; i2 < globParts.length - 1; i2++) {
-      for (let j = i2 + 1; j < globParts.length; j++) {
-        const matched = this.partsMatch(globParts[i2], globParts[j], !this.preserveMultipleSlashes);
-        if (!matched)
-          continue;
-        globParts[i2] = matched;
-        globParts[j] = [];
-      }
+  if (kind === "after") {
+    hook4 = (method, options) => {
+      let result;
+      return Promise.resolve().then(method.bind(null, options)).then((result_) => {
+        result = result_;
+        return orig(result, options);
+      }).then(() => {
+        return result;
+      });
+    };
+  }
+  if (kind === "error") {
+    hook4 = (method, options) => {
+      return Promise.resolve().then(method.bind(null, options)).catch((error) => {
+        return orig(error, options);
+      });
+    };
+  }
+  state.registry[name].push({
+    hook: hook4,
+    orig
+  });
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/core/node_modules/before-after-hook/lib/remove.js
+function removeHook(state, name, method) {
+  if (!state.registry[name]) {
+    return;
+  }
+  const index = state.registry[name].map((registered) => {
+    return registered.orig;
+  }).indexOf(method);
+  if (index === -1) {
+    return;
+  }
+  state.registry[name].splice(index, 1);
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/core/node_modules/before-after-hook/index.js
+var bind = Function.bind;
+var bindable = bind.bind(bind);
+function bindApi(hook4, state, name) {
+  const removeHookRef = bindable(removeHook, null).apply(
+    null,
+    name ? [state, name] : [state]
+  );
+  hook4.api = { remove: removeHookRef };
+  hook4.remove = removeHookRef;
+  ["before", "error", "after", "wrap"].forEach((kind) => {
+    const args = name ? [state, kind, name] : [state, kind];
+    hook4[kind] = hook4.api[kind] = bindable(addHook, null).apply(null, args);
+  });
+}
+function Singular() {
+  const singularHookName = Symbol("Singular");
+  const singularHookState = {
+    registry: {}
+  };
+  const singularHook = register.bind(null, singularHookState, singularHookName);
+  bindApi(singularHook, singularHookState, singularHookName);
+  return singularHook;
+}
+function Collection2() {
+  const state = {
+    registry: {}
+  };
+  const hook4 = register.bind(null, state);
+  bindApi(hook4, state);
+  return hook4;
+}
+var before_after_hook_default = { Singular, Collection: Collection2 };
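// The vendored before-after-hook module above is how octokit lets plugins wrap requests.
// Minimal sketch of the Collection flavour as bundled here (inside an async context); the hook
// name "request" and the startedAt field are arbitrary examples.
const hooks = Collection2();
hooks.before("request", (options: any) => {
  options.startedAt = Date.now(); // runs before the wrapped method
});
hooks.after("request", (_result: any, options: any) => {
  console.log("request took", Date.now() - options.startedAt, "ms");
});
const response = await hooks(
  "request",
  async (options: any) => ({ ok: true, url: options.url }), // the wrapped method
  { url: "https://api.github.com" }
);
console.log(response); // { ok: true, url: "https://api.github.com" }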
+
+// node_modules/@octokit/endpoint/dist-bundle/index.js
+var VERSION7 = "0.0.0-development";
+var userAgent2 = `octokit-endpoint.js/${VERSION7} ${getUserAgent2()}`;
+var DEFAULTS2 = {
+  method: "GET",
+  baseUrl: "https://api.github.com",
+  headers: {
+    accept: "application/vnd.github.v3+json",
+    "user-agent": userAgent2
+  },
+  mediaType: {
+    format: ""
+  }
+};
+function lowercaseKeys2(object) {
+  if (!object) {
+    return {};
+  }
+  return Object.keys(object).reduce((newObj, key) => {
+    newObj[key.toLowerCase()] = object[key];
+    return newObj;
+  }, {});
+}
+function isPlainObject3(value) {
+  if (typeof value !== "object" || value === null)
+    return false;
+  if (Object.prototype.toString.call(value) !== "[object Object]")
+    return false;
+  const proto2 = Object.getPrototypeOf(value);
+  if (proto2 === null)
+    return true;
+  const Ctor = Object.prototype.hasOwnProperty.call(proto2, "constructor") && proto2.constructor;
+  return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
+function mergeDeep2(defaults2, options) {
+  const result = Object.assign({}, defaults2);
+  Object.keys(options).forEach((key) => {
+    if (isPlainObject3(options[key])) {
+      if (!(key in defaults2))
+        Object.assign(result, { [key]: options[key] });
+      else
+        result[key] = mergeDeep2(defaults2[key], options[key]);
+    } else {
+      Object.assign(result, { [key]: options[key] });
+    }
+  });
+  return result;
+}
+function removeUndefinedProperties2(obj) {
+  for (const key in obj) {
+    if (obj[key] === void 0) {
+      delete obj[key];
     }
-    return globParts.filter((gs) => gs.length);
   }
-  partsMatch(a, b, emptyGSMatch = false) {
-    let ai = 0;
-    let bi = 0;
-    let result = [];
-    let which = "";
-    while (ai < a.length && bi < b.length) {
-      if (a[ai] === b[bi]) {
-        result.push(which === "b" ? b[bi] : a[ai]);
-        ai++;
-        bi++;
-      } else if (emptyGSMatch && a[ai] === "**" && b[bi] === a[ai + 1]) {
-        result.push(a[ai]);
-        ai++;
-      } else if (emptyGSMatch && b[bi] === "**" && a[ai] === b[bi + 1]) {
-        result.push(b[bi]);
-        bi++;
-      } else if (a[ai] === "*" && b[bi] && (this.options.dot || !b[bi].startsWith(".")) && b[bi] !== "**") {
-        if (which === "b")
-          return false;
-        which = "a";
-        result.push(a[ai]);
-        ai++;
-        bi++;
-      } else if (b[bi] === "*" && a[ai] && (this.options.dot || !a[ai].startsWith(".")) && a[ai] !== "**") {
-        if (which === "a")
-          return false;
-        which = "b";
-        result.push(b[bi]);
-        ai++;
-        bi++;
-      } else {
-        return false;
-      }
+  return obj;
+}
+function merge2(defaults2, route, options) {
+  if (typeof route === "string") {
+    let [method, url] = route.split(" ");
+    options = Object.assign(url ? { method, url } : { url: method }, options);
+  } else {
+    options = Object.assign({}, route);
+  }
+  options.headers = lowercaseKeys2(options.headers);
+  removeUndefinedProperties2(options);
+  removeUndefinedProperties2(options.headers);
+  const mergedOptions = mergeDeep2(defaults2 || {}, options);
+  if (options.url === "/graphql") {
+    if (defaults2 && defaults2.mediaType.previews?.length) {
+      mergedOptions.mediaType.previews = defaults2.mediaType.previews.filter(
+        (preview) => !mergedOptions.mediaType.previews.includes(preview)
+      ).concat(mergedOptions.mediaType.previews);
     }
-    return a.length === b.length && result;
+    mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, ""));
   }
-  parseNegate() {
-    if (this.nonegate)
-      return;
-    const pattern = this.pattern;
-    let negate = false;
-    let negateOffset = 0;
-    for (let i2 = 0; i2 < pattern.length && pattern.charAt(i2) === "!"; i2++) {
-      negate = !negate;
-      negateOffset++;
+  return mergedOptions;
+}
+function addQueryParameters2(url, parameters) {
+  const separator = /\?/.test(url) ? "&" : "?";
+  const names = Object.keys(parameters);
+  if (names.length === 0) {
+    return url;
+  }
+  return url + separator + names.map((name) => {
+    if (name === "q") {
+      return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
     }
-    if (negateOffset)
-      this.pattern = pattern.slice(negateOffset);
-    this.negate = negate;
+    return `${name}=${encodeURIComponent(parameters[name])}`;
+  }).join("&");
+}
+var urlVariableRegex2 = /\{[^}]+\}/g;
+function removeNonChars2(variableName) {
+  return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+function extractUrlVariableNames2(url) {
+  const matches = url.match(urlVariableRegex2);
+  if (!matches) {
+    return [];
   }
-  // set partial to true to test if, for example,
-  // "/a/b" matches the start of "/*/b/*/d"
-  // Partial means, if you run out of file before you run
-  // out of pattern, then that's fine, as long as all
-  // the parts match.
-  matchOne(file, pattern, partial = false) {
-    const options = this.options;
-    if (this.isWindows) {
-      const fileDrive = typeof file[0] === "string" && /^[a-z]:$/i.test(file[0]);
-      const fileUNC = !fileDrive && file[0] === "" && file[1] === "" && file[2] === "?" && /^[a-z]:$/i.test(file[3]);
-      const patternDrive = typeof pattern[0] === "string" && /^[a-z]:$/i.test(pattern[0]);
-      const patternUNC = !patternDrive && pattern[0] === "" && pattern[1] === "" && pattern[2] === "?" && typeof pattern[3] === "string" && /^[a-z]:$/i.test(pattern[3]);
-      const fdi = fileUNC ? 3 : fileDrive ? 0 : void 0;
-      const pdi = patternUNC ? 3 : patternDrive ? 0 : void 0;
-      if (typeof fdi === "number" && typeof pdi === "number") {
-        const [fd, pd] = [file[fdi], pattern[pdi]];
-        if (fd.toLowerCase() === pd.toLowerCase()) {
-          pattern[pdi] = fd;
-          if (pdi > fdi) {
-            pattern = pattern.slice(pdi);
-          } else if (fdi > pdi) {
-            file = file.slice(fdi);
-          }
-        }
-      }
+  return matches.map(removeNonChars2).reduce((a, b) => a.concat(b), []);
+}
+function omit2(object, keysToOmit) {
+  const result = { __proto__: null };
+  for (const key of Object.keys(object)) {
+    if (keysToOmit.indexOf(key) === -1) {
+      result[key] = object[key];
     }
-    const { optimizationLevel = 1 } = this.options;
-    if (optimizationLevel >= 2) {
-      file = this.levelTwoFileOptimize(file);
+  }
+  return result;
+}
+function encodeReserved2(str) {
+  return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) {
+    if (!/%[0-9A-Fa-f]/.test(part)) {
+      part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
     }
-    this.debug("matchOne", this, { file, pattern });
-    this.debug("matchOne", file.length, pattern.length);
-    for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-      this.debug("matchOne loop");
-      var p = pattern[pi];
-      var f3 = file[fi];
-      this.debug(pattern, p, f3);
-      if (p === false) {
-        return false;
+    return part;
+  }).join("");
+}
+function encodeUnreserved2(str) {
+  return encodeURIComponent(str).replace(/[!'()*]/g, function(c) {
+    return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+  });
+}
+function encodeValue2(operator, value, key) {
+  value = operator === "+" || operator === "#" ? encodeReserved2(value) : encodeUnreserved2(value);
+  if (key) {
+    return encodeUnreserved2(key) + "=" + value;
+  } else {
+    return value;
+  }
+}
+function isDefined2(value) {
+  return value !== void 0 && value !== null;
+}
+function isKeyOperator2(operator) {
+  return operator === ";" || operator === "&" || operator === "?";
+}
+function getValues2(context2, operator, key, modifier) {
+  var value = context2[key], result = [];
+  if (isDefined2(value) && value !== "") {
+    if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+      value = value.toString();
+      if (modifier && modifier !== "*") {
+        value = value.substring(0, parseInt(modifier, 10));
       }
-      if (p === GLOBSTAR) {
-        this.debug("GLOBSTAR", [pattern, p, f3]);
-        var fr = fi;
-        var pr = pi + 1;
-        if (pr === pl) {
-          this.debug("** at the end");
-          for (; fi < fl; fi++) {
-            if (file[fi] === "." || file[fi] === ".." || !options.dot && file[fi].charAt(0) === ".")
-              return false;
-          }
-          return true;
+      result.push(
+        encodeValue2(operator, value, isKeyOperator2(operator) ? key : "")
+      );
+    } else {
+      if (modifier === "*") {
+        if (Array.isArray(value)) {
+          value.filter(isDefined2).forEach(function(value2) {
+            result.push(
+              encodeValue2(operator, value2, isKeyOperator2(operator) ? key : "")
+            );
+          });
+        } else {
+          Object.keys(value).forEach(function(k) {
+            if (isDefined2(value[k])) {
+              result.push(encodeValue2(operator, value[k], k));
+            }
+          });
         }
-        while (fr < fl) {
-          var swallowee = file[fr];
-          this.debug("\nglobstar while", file, fr, pattern, pr, swallowee);
-          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-            this.debug("globstar found match!", fr, fl, swallowee);
-            return true;
-          } else {
-            if (swallowee === "." || swallowee === ".." || !options.dot && swallowee.charAt(0) === ".") {
-              this.debug("dot detected!", file, fr, pattern, pr);
-              break;
+      } else {
+        const tmp = [];
+        if (Array.isArray(value)) {
+          value.filter(isDefined2).forEach(function(value2) {
+            tmp.push(encodeValue2(operator, value2));
+          });
+        } else {
+          Object.keys(value).forEach(function(k) {
+            if (isDefined2(value[k])) {
+              tmp.push(encodeUnreserved2(k));
+              tmp.push(encodeValue2(operator, value[k].toString()));
             }
-            this.debug("globstar swallow a segment, and continue");
-            fr++;
-          }
+          });
         }
-        if (partial) {
-          this.debug("\n>>> no match, partial?", file, fr, pattern, pr);
-          if (fr === fl) {
-            return true;
-          }
+        if (isKeyOperator2(operator)) {
+          result.push(encodeUnreserved2(key) + "=" + tmp.join(","));
+        } else if (tmp.length !== 0) {
+          result.push(tmp.join(","));
         }
-        return false;
       }
-      let hit;
-      if (typeof p === "string") {
-        hit = f3 === p;
-        this.debug("string match", p, f3, hit);
+    }
+  } else {
+    if (operator === ";") {
+      if (isDefined2(value)) {
+        result.push(encodeUnreserved2(key));
+      }
+    } else if (value === "" && (operator === "&" || operator === "?")) {
+      result.push(encodeUnreserved2(key) + "=");
+    } else if (value === "") {
+      result.push("");
+    }
+  }
+  return result;
+}
+function parseUrl2(template) {
+  return {
+    expand: expand3.bind(null, template)
+  };
+}
+function expand3(template, context2) {
+  var operators = ["+", "#", ".", "/", ";", "?", "&"];
+  template = template.replace(
+    /\{([^\{\}]+)\}|([^\{\}]+)/g,
+    function(_, expression, literal) {
+      if (expression) {
+        let operator = "";
+        const values = [];
+        if (operators.indexOf(expression.charAt(0)) !== -1) {
+          operator = expression.charAt(0);
+          expression = expression.substr(1);
+        }
+        expression.split(/,/g).forEach(function(variable) {
+          var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+          values.push(getValues2(context2, operator, tmp[1], tmp[2] || tmp[3]));
+        });
+        if (operator && operator !== "+") {
+          var separator = ",";
+          if (operator === "?") {
+            separator = "&";
+          } else if (operator !== "#") {
+            separator = operator;
+          }
+          return (values.length !== 0 ? operator : "") + values.join(separator);
+        } else {
+          return values.join(",");
+        }
       } else {
-        hit = p.test(f3);
-        this.debug("pattern match", p, f3, hit);
+        return encodeReserved2(literal);
       }
-      if (!hit)
-        return false;
-    }
-    if (fi === fl && pi === pl) {
-      return true;
-    } else if (fi === fl) {
-      return partial;
-    } else if (pi === pl) {
-      return fi === fl - 1 && file[fi] === "";
-    } else {
-      throw new Error("wtf?");
     }
+  );
+  if (template === "/") {
+    return template;
+  } else {
+    return template.replace(/\/$/, "");
   }
-  braceExpand() {
-    return braceExpand(this.pattern, this.options);
+}
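// parseUrl2/expand3 above implement the subset of RFC 6570 URI templates that GitHub's REST
// routes use. Small example with placeholder values, using the local parseUrl2 helper:
const url = parseUrl2("/repos/{owner}/{repo}/issues{?labels}").expand({
  owner: "octocat",
  repo: "hello-world",
  labels: "bug"
});
console.log(url); // "/repos/octocat/hello-world/issues?labels=bug"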
+function parse2(options) {
+  let method = options.method.toUpperCase();
+  let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
+  let headers = Object.assign({}, options.headers);
+  let body;
+  let parameters = omit2(options, [
+    "method",
+    "baseUrl",
+    "url",
+    "headers",
+    "request",
+    "mediaType"
+  ]);
+  const urlVariableNames = extractUrlVariableNames2(url);
+  url = parseUrl2(url).expand(parameters);
+  if (!/^http/.test(url)) {
+    url = options.baseUrl + url;
   }
-  parse(pattern) {
-    assertValidPattern(pattern);
-    const options = this.options;
-    if (pattern === "**")
-      return GLOBSTAR;
-    if (pattern === "")
-      return "";
-    let m2;
-    let fastTest = null;
-    if (m2 = pattern.match(starRE)) {
-      fastTest = options.dot ? starTestDot : starTest;
-    } else if (m2 = pattern.match(starDotExtRE)) {
-      fastTest = (options.nocase ? options.dot ? starDotExtTestNocaseDot : starDotExtTestNocase : options.dot ? starDotExtTestDot : starDotExtTest)(m2[1]);
-    } else if (m2 = pattern.match(qmarksRE)) {
-      fastTest = (options.nocase ? options.dot ? qmarksTestNocaseDot : qmarksTestNocase : options.dot ? qmarksTestDot : qmarksTest)(m2);
-    } else if (m2 = pattern.match(starDotStarRE)) {
-      fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-    } else if (m2 = pattern.match(dotStarRE)) {
-      fastTest = dotStarTest;
+  const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl");
+  const remainingParameters = omit2(parameters, omittedParameters);
+  const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+  if (!isBinaryRequest) {
+    if (options.mediaType.format) {
+      headers.accept = headers.accept.split(/,/).map(
+        (format) => format.replace(
+          /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/,
+          `application/vnd$1$2.${options.mediaType.format}`
+        )
+      ).join(",");
     }
-    const re = AST.fromGlob(pattern, this.options).toMMPattern();
-    if (fastTest && typeof re === "object") {
-      Reflect.defineProperty(re, "test", { value: fastTest });
+    if (url.endsWith("/graphql")) {
+      if (options.mediaType.previews?.length) {
+        const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+        headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => {
+          const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+          return `application/vnd.github.${preview}-preview${format}`;
+        }).join(",");
+      }
     }
-    return re;
   }
-  makeRe() {
-    if (this.regexp || this.regexp === false)
-      return this.regexp;
-    const set2 = this.set;
-    if (!set2.length) {
-      this.regexp = false;
-      return this.regexp;
+  if (["GET", "HEAD"].includes(method)) {
+    url = addQueryParameters2(url, remainingParameters);
+  } else {
+    if ("data" in remainingParameters) {
+      body = remainingParameters.data;
+    } else {
+      if (Object.keys(remainingParameters).length) {
+        body = remainingParameters;
+      }
     }
-    const options = this.options;
-    const twoStar = options.noglobstar ? star2 : options.dot ? twoStarDot : twoStarNoDot;
-    const flags = new Set(options.nocase ? ["i"] : []);
-    let re = set2.map((pattern) => {
-      const pp = pattern.map((p) => {
-        if (p instanceof RegExp) {
-          for (const f3 of p.flags.split(""))
-            flags.add(f3);
-        }
-        return typeof p === "string" ? regExpEscape2(p) : p === GLOBSTAR ? GLOBSTAR : p._src;
-      });
-      pp.forEach((p, i2) => {
-        const next = pp[i2 + 1];
-        const prev = pp[i2 - 1];
-        if (p !== GLOBSTAR || prev === GLOBSTAR) {
-          return;
-        }
-        if (prev === void 0) {
-          if (next !== void 0 && next !== GLOBSTAR) {
-            pp[i2 + 1] = "(?:\\/|" + twoStar + "\\/)?" + next;
-          } else {
-            pp[i2] = twoStar;
-          }
-        } else if (next === void 0) {
-          pp[i2 - 1] = prev + "(?:\\/|" + twoStar + ")?";
-        } else if (next !== GLOBSTAR) {
-          pp[i2 - 1] = prev + "(?:\\/|\\/" + twoStar + "\\/)" + next;
-          pp[i2 + 1] = GLOBSTAR;
-        }
+  }
+  if (!headers["content-type"] && typeof body !== "undefined") {
+    headers["content-type"] = "application/json; charset=utf-8";
+  }
+  if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+    body = "";
+  }
+  return Object.assign(
+    { method, url, headers },
+    typeof body !== "undefined" ? { body } : null,
+    options.request ? { request: options.request } : null
+  );
+}
+function endpointWithDefaults2(defaults2, route, options) {
+  return parse2(merge2(defaults2, route, options));
+}
+function withDefaults4(oldDefaults, newDefaults) {
+  const DEFAULTS22 = merge2(oldDefaults, newDefaults);
+  const endpoint22 = endpointWithDefaults2.bind(null, DEFAULTS22);
+  return Object.assign(endpoint22, {
+    DEFAULTS: DEFAULTS22,
+    defaults: withDefaults4.bind(null, DEFAULTS22),
+    merge: merge2.bind(null, DEFAULTS22),
+    parse: parse2
+  });
+}
+var endpoint2 = withDefaults4(null, DEFAULTS2);
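// endpoint2 above turns a route string plus parameters into plain request options.
// Quick sketch of what it produces for a simple GET route (values are placeholders):
const requestOptions = endpoint2("GET /repos/{owner}/{repo}", {
  owner: "octocat",
  repo: "hello-world"
});
console.log(requestOptions.method);         // "GET"
console.log(requestOptions.url);            // "https://api.github.com/repos/octocat/hello-world"
console.log(requestOptions.headers.accept); // "application/vnd.github.v3+json"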
+
+// node_modules/@octokit/request/node_modules/@octokit/request-error/dist-src/index.js
+var RequestError2 = class extends Error {
+  name;
+  /**
+   * http status code
+   */
+  status;
+  /**
+   * Request options that lead to the error.
+   */
+  request;
+  /**
+   * Response object if a response was received
+   */
+  response;
+  constructor(message, statusCode, options) {
+    super(message);
+    if (Error.captureStackTrace) {
+      Error.captureStackTrace(this, this.constructor);
+    }
+    this.name = "HttpError";
+    this.status = statusCode;
+    if ("response" in options) {
+      this.response = options.response;
+    }
+    const requestCopy = Object.assign({}, options.request);
+    if (options.request.headers.authorization) {
+      requestCopy.headers = Object.assign({}, options.request.headers, {
+        authorization: options.request.headers.authorization.replace(
+          / .*$/,
+          " [REDACTED]"
+        )
       });
-      return pp.filter((p) => p !== GLOBSTAR).join("/");
-    }).join("|");
-    const [open, close] = set2.length > 1 ? ["(?:", ")"] : ["", ""];
-    re = "^" + open + re + close + "$";
-    if (this.negate)
-      re = "^(?!" + re + ").+$";
-    try {
-      this.regexp = new RegExp(re, [...flags].join(""));
-    } catch (ex) {
-      this.regexp = false;
     }
-    return this.regexp;
+    requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
+    this.request = requestCopy;
   }
-  slashSplit(p) {
-    if (this.preserveMultipleSlashes) {
-      return p.split("/");
-    } else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-      return ["", ...p.split(/\/+/)];
-    } else {
-      return p.split(/\/+/);
-    }
+};
+
+// node_modules/@octokit/request/dist-bundle/index.js
+var VERSION8 = "0.0.0-development";
+function isPlainObject4(value) {
+  if (typeof value !== "object" || value === null)
+    return false;
+  if (Object.prototype.toString.call(value) !== "[object Object]")
+    return false;
+  const proto2 = Object.getPrototypeOf(value);
+  if (proto2 === null)
+    return true;
+  const Ctor = Object.prototype.hasOwnProperty.call(proto2, "constructor") && proto2.constructor;
+  return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value);
+}
+function getBufferResponse2(response) {
+  return response.arrayBuffer();
+}
+function fetchWrapper2(requestOptions) {
+  const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
+  const parseSuccessResponseBody = requestOptions.request?.parseSuccessResponseBody !== false;
+  if (isPlainObject4(requestOptions.body) || Array.isArray(requestOptions.body)) {
+    requestOptions.body = JSON.stringify(requestOptions.body);
   }
-  match(f3, partial = this.partial) {
-    this.debug("match", f3, this.pattern);
-    if (this.comment) {
-      return false;
-    }
-    if (this.empty) {
-      return f3 === "";
+  let headers = {};
+  let status;
+  let url;
+  let { fetch: fetch2 } = globalThis;
+  if (requestOptions.request?.fetch) {
+    fetch2 = requestOptions.request.fetch;
+  }
+  if (!fetch2) {
+    throw new Error(
+      "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing"
+    );
+  }
+  return fetch2(requestOptions.url, {
+    method: requestOptions.method,
+    body: requestOptions.body,
+    // Header values must be `string`
+    headers: Object.fromEntries(
+      Object.entries(requestOptions.headers).map(([name, value]) => [
+        name,
+        String(value)
+      ])
+    ),
+    signal: requestOptions.request?.signal,
+    // duplex must be set if request.body is ReadableStream or Async Iterables.
+    // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex.
+    ...requestOptions.body && { duplex: "half" }
+  }).then(async (response) => {
+    url = response.url;
+    status = response.status;
+    for (const keyAndValue of response.headers) {
+      headers[keyAndValue[0]] = keyAndValue[1];
     }
-    if (f3 === "/" && partial) {
-      return true;
+    if ("deprecation" in headers) {
+      const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
+      const deprecationLink = matches && matches.pop();
+      log.warn(
+        `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`
+      );
     }
-    const options = this.options;
-    if (this.isWindows) {
-      f3 = f3.split("\\").join("/");
+    if (status === 204 || status === 205) {
+      return;
     }
-    const ff = this.slashSplit(f3);
-    this.debug(this.pattern, "split", ff);
-    const set2 = this.set;
-    this.debug(this.pattern, "set", set2);
-    let filename = ff[ff.length - 1];
-    if (!filename) {
-      for (let i2 = ff.length - 2; !filename && i2 >= 0; i2--) {
-        filename = ff[i2];
+    if (requestOptions.method === "HEAD") {
+      if (status < 400) {
+        return;
       }
+      throw new RequestError2(response.statusText, status, {
+        response: {
+          url,
+          status,
+          headers,
+          data: void 0
+        },
+        request: requestOptions
+      });
     }
-    for (let i2 = 0; i2 < set2.length; i2++) {
-      const pattern = set2[i2];
-      let file = ff;
-      if (options.matchBase && pattern.length === 1) {
-        file = [filename];
-      }
-      const hit = this.matchOne(file, pattern, partial);
-      if (hit) {
-        if (options.flipNegate) {
-          return true;
-        }
-        return !this.negate;
+    if (status === 304) {
+      throw new RequestError2("Not modified", status, {
+        response: {
+          url,
+          status,
+          headers,
+          data: await getResponseData2(response)
+        },
+        request: requestOptions
+      });
+    }
+    if (status >= 400) {
+      const data = await getResponseData2(response);
+      const error = new RequestError2(toErrorMessage2(data), status, {
+        response: {
+          url,
+          status,
+          headers,
+          data
+        },
+        request: requestOptions
+      });
+      throw error;
+    }
+    return parseSuccessResponseBody ? await getResponseData2(response) : response.body;
+  }).then((data) => {
+    return {
+      status,
+      url,
+      headers,
+      data
+    };
+  }).catch((error) => {
+    if (error instanceof RequestError2)
+      throw error;
+    else if (error.name === "AbortError")
+      throw error;
+    let message = error.message;
+    if (error.name === "TypeError" && "cause" in error) {
+      if (error.cause instanceof Error) {
+        message = error.cause.message;
+      } else if (typeof error.cause === "string") {
+        message = error.cause;
       }
     }
-    if (options.flipNegate) {
-      return false;
+    throw new RequestError2(message, 500, {
+      request: requestOptions
+    });
+  });
+}
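+// Picks a body parser from the content-type header: JSON (falling back to text),
+// plain text, or ArrayBuffer for everything else.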
+async function getResponseData2(response) {
+  const contentType = response.headers.get("content-type");
+  if (/application\/json/.test(contentType)) {
+    return response.json().catch(() => response.text()).catch(() => "");
+  }
+  if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+    return response.text();
+  }
+  return getBufferResponse2(response);
+}
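+// Builds a readable error message from an error response body, appending
+// documentation_url and per-item errors when present.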
+function toErrorMessage2(data) {
+  if (typeof data === "string")
+    return data;
+  let suffix;
+  if ("documentation_url" in data) {
+    suffix = ` - ${data.documentation_url}`;
+  } else {
+    suffix = "";
+  }
+  if ("message" in data) {
+    if (Array.isArray(data.errors)) {
+      return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`;
     }
-    return this.negate;
+    return `${data.message}${suffix}`;
   }
-  static defaults(def) {
-    return minimatch.defaults(def).Minimatch;
+  return `Unknown error: ${JSON.stringify(data)}`;
+}
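+// Returns a request function bound to merged endpoint defaults, exposing .endpoint and
+// .defaults so further defaults can be layered on top.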
+function withDefaults5(oldEndpoint, newDefaults) {
+  const endpoint22 = oldEndpoint.defaults(newDefaults);
+  const newApi = function(route, parameters) {
+    const endpointOptions = endpoint22.merge(route, parameters);
+    if (!endpointOptions.request || !endpointOptions.request.hook) {
+      return fetchWrapper2(endpoint22.parse(endpointOptions));
+    }
+    const request22 = (route2, parameters2) => {
+      return fetchWrapper2(
+        endpoint22.parse(endpoint22.merge(route2, parameters2))
+      );
+    };
+    Object.assign(request22, {
+      endpoint: endpoint22,
+      defaults: withDefaults5.bind(null, endpoint22)
+    });
+    return endpointOptions.request.hook(request22, endpointOptions);
+  };
+  return Object.assign(newApi, {
+    endpoint: endpoint22,
+    defaults: withDefaults5.bind(null, endpoint22)
+  });
+}
+var request2 = withDefaults5(endpoint2, {
+  headers: {
+    "user-agent": `octokit-request.js/${VERSION8} ${getUserAgent2()}`
   }
-};
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape2;
+});
 
-// src/get-head-diff-since-review.ts
-var import_path = require("path");
-var getHeadDiffSinceReview = async ({
-  headSha,
-  reviewAssociatedSha,
-  baseBranch
-}) => {
-  const git = simpleGit();
-  const headAndReviewDiff = (await git.diffSummary([`${reviewAssociatedSha}..${headSha}`])).files.map(({ file }) => file);
-  const mainAndSecondCommitDiff = (await git.diffSummary([`origin/${baseBranch}...${headSha}`])).files.map(({ file }) => file);
-  const intersectionFiles = headAndReviewDiff.filter(
-    (file) => mainAndSecondCommitDiff.includes(file)
-  );
-  const diffFiles = [];
-  const fileRenameRegex = /{(.*) => (.*)}/;
-  await Promise.all(
-    intersectionFiles.map(async (file) => {
-      const fileRenameMatch = file.match(fileRenameRegex);
-      if (fileRenameMatch) {
-        const path1 = (0, import_path.normalize)(
-          file.replace(fileRenameRegex, fileRenameMatch[1])
-        );
-        const path2 = (0, import_path.normalize)(
-          file.replace(fileRenameRegex, fileRenameMatch[2])
-        );
-        diffFiles.push(path1);
-        diffFiles.push(path2);
-        console.debug("Filename change:", path1, path2);
-        return;
-      }
-      const firstFileDiff = await git.diff([
-        `origin/${baseBranch}...${reviewAssociatedSha}`,
-        file
-      ]);
-      const secondFileDiff = await git.diff([
-        `origin/${baseBranch}...${headSha}`,
-        file
-      ]);
-      if (firstFileDiff.split("\n").filter((line) => line.startsWith("+") || line.startsWith("-")).join("\n") !== secondFileDiff.split("\n").filter((line) => line.startsWith("+") || line.startsWith("-")).join("\n")) {
-        diffFiles.push(file);
-      }
-    })
-  );
-  return diffFiles;
+// node_modules/@octokit/graphql/dist-bundle/index.js
+var VERSION9 = "0.0.0-development";
+function _buildMessageForResponseErrors2(data) {
+  return `Request failed due to following response errors:
+` + data.errors.map((e2) => ` - ${e2.message}`).join("\n");
+}
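+// Error thrown when a GraphQL response carries an errors array; preserves the request,
+// response headers, errors, and any partial data.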
+var GraphqlResponseError2 = class extends Error {
+  constructor(request22, headers, response) {
+    super(_buildMessageForResponseErrors2(response));
+    this.request = request22;
+    this.headers = headers;
+    this.response = response;
+    this.errors = response.errors;
+    this.data = response.data;
+    if (Error.captureStackTrace) {
+      Error.captureStackTrace(this, this.constructor);
+    }
+  }
+  name = "GraphqlResponseError";
+  errors;
+  data;
 };
-
-// src/group-reviews-by-commit.ts
-var chalk2 = new Chalk({ level: 2 });
-var groupReviewsByCommit = async ({
-  latestReviews,
-  headCommit,
-  baseBranch,
-  ignoreFiles
-}) => {
-  const codeowners = new import_codeowners.default();
-  const git = simpleGit();
-  const reviewsWithoutHistory = [];
-  const groupedReviewsByCommit = {};
-  await Promise.all(
-    latestReviews.map(async (review) => {
-      const reviewCommit = review.commit?.oid;
-      const basehead = `${reviewCommit}..${headCommit}`;
-      if (groupedReviewsByCommit[basehead]) {
-        groupedReviewsByCommit[basehead].reviews.push(review);
-        return;
-      }
-      try {
-        await git.catFile(["commit", reviewCommit]);
-      } catch {
-        console.log(
-          "\n",
-          chalk2.yellow(
-            `Commit '${reviewCommit}' doesn't exist in the history. It may be because it was overwritten by force push or because it's outside of checkout depth.`
-          ),
-          "\n",
-          chalk2.yellow(`Approval by ${review.author?.login} will be removed.`),
-          "\n"
-        );
-        reviewsWithoutHistory.push(review);
-        return;
+var NON_VARIABLE_OPTIONS2 = [
+  "method",
+  "baseUrl",
+  "url",
+  "headers",
+  "request",
+  "query",
+  "mediaType"
+];
+var FORBIDDEN_VARIABLE_OPTIONS2 = ["query", "method", "url"];
+var GHES_V3_SUFFIX_REGEX2 = /\/api\/v3\/?$/;
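+// Rejects reserved variable names, splits request options from GraphQL variables,
+// rewrites GHES "/api/v3" base URLs to "/api/graphql", and surfaces response errors
+// as GraphqlResponseError2.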
+function graphql3(request22, query, options) {
+  if (options) {
+    if (typeof query === "string" && "query" in options) {
+      return Promise.reject(
+        new Error(`[@octokit/graphql] "query" cannot be used as variable name`)
+      );
+    }
+    for (const key in options) {
+      if (!FORBIDDEN_VARIABLE_OPTIONS2.includes(key))
+        continue;
+      return Promise.reject(
+        new Error(
+          `[@octokit/graphql] "${key}" cannot be used as variable name`
+        )
+      );
+    }
+  }
+  const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query;
+  const requestOptions = Object.keys(
+    parsedOptions
+  ).reduce((result, key) => {
+    if (NON_VARIABLE_OPTIONS2.includes(key)) {
+      result[key] = parsedOptions[key];
+      return result;
+    }
+    if (!result.variables) {
+      result.variables = {};
+    }
+    result.variables[key] = parsedOptions[key];
+    return result;
+  }, {});
+  const baseUrl = parsedOptions.baseUrl || request22.endpoint.DEFAULTS.baseUrl;
+  if (GHES_V3_SUFFIX_REGEX2.test(baseUrl)) {
+    requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX2, "/api/graphql");
+  }
+  return request22(requestOptions).then((response) => {
+    if (response.data.errors) {
+      const headers = {};
+      for (const key of Object.keys(response.headers)) {
+        headers[key] = response.headers[key];
       }
-      const filesChangedByHeadCommit = await getHeadDiffSinceReview({
-        reviewAssociatedSha: reviewCommit,
-        headSha: headCommit,
-        baseBranch
-      });
-      (0, import_core.debug)(`Changes in ${basehead}:
-${filesChangedByHeadCommit.join("\n")}`);
-      groupedReviewsByCommit[basehead] = {
-        reviews: [review],
-        // filter out ignored files
-        filesChangedByHeadCommit: filesChangedByHeadCommit.filter(
-          (filename) => !ignoreFiles?.some(
-            (pattern) => minimatch(filename, pattern, { dot: true })
-          )
-        ).map((filename) => ({
-          owners: codeowners.getOwner(filename),
-          filename
-        }))
-      };
-    })
+      throw new GraphqlResponseError2(
+        requestOptions,
+        headers,
+        response.data
+      );
+    }
+    return response.data.data;
+  });
+}
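+// Builds a graphql function bound to a request with merged defaults, mirroring withDefaults5 above.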
+function withDefaults6(request22, newDefaults) {
+  const newRequest = request22.defaults(newDefaults);
+  const newApi = (query, options) => {
+    return graphql3(newRequest, query, options);
+  };
+  return Object.assign(newApi, {
+    defaults: withDefaults6.bind(null, newRequest),
+    endpoint: newRequest.endpoint
+  });
+}
+var graphql22 = withDefaults6(request2, {
+  headers: {
+    "user-agent": `octokit-graphql.js/${VERSION9} ${getUserAgent2()}`
+  },
+  method: "POST",
+  url: "/graphql"
+});
+function withCustomRequest2(customRequest) {
+  return withDefaults6(customRequest, {
+    method: "POST",
+    url: "/graphql"
+  });
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/core/node_modules/@octokit/auth-token/dist-bundle/index.js
+var REGEX_IS_INSTALLATION_LEGACY2 = /^v1\./;
+var REGEX_IS_INSTALLATION2 = /^ghs_/;
+var REGEX_IS_USER_TO_SERVER2 = /^ghu_/;
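+// Infers the token type from its shape: three dot-separated segments => app JWT,
+// "ghs_"/"v1." prefixes => installation, "ghu_" => user-to-server, otherwise oauth.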
+async function auth2(token) {
+  const isApp = token.split(/\./).length === 3;
+  const isInstallation = REGEX_IS_INSTALLATION_LEGACY2.test(token) || REGEX_IS_INSTALLATION2.test(token);
+  const isUserToServer = REGEX_IS_USER_TO_SERVER2.test(token);
+  const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
+  return {
+    type: "token",
+    token,
+    tokenType
+  };
+}
+function withAuthorizationPrefix2(token) {
+  if (token.split(/\./).length === 3) {
+    return `bearer ${token}`;
+  }
+  return `token ${token}`;
+}
+async function hook2(token, request3, route, parameters) {
+  const endpoint3 = request3.endpoint.merge(
+    route,
+    parameters
   );
-  return { reviewsWithoutHistory, groupedReviewsByCommit };
+  endpoint3.headers.authorization = withAuthorizationPrefix2(token);
+  return request3(endpoint3);
+}
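+// Validates the token, strips any existing "token "/"bearer " prefix, and returns an
+// auth function with the request hook attached.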
+var createTokenAuth3 = function createTokenAuth22(token) {
+  if (!token) {
+    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+  }
+  if (typeof token !== "string") {
+    throw new Error(
+      "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
+    );
+  }
+  token = token.replace(/^(token|bearer) +/i, "");
+  return Object.assign(auth2.bind(null, token), {
+    hook: hook2.bind(null, token)
+  });
 };
 
-// src/type-guards.ts
-function isPresent(value) {
-  return value != null;
-}
+// node_modules/@octokit/action/node_modules/@octokit/core/dist-src/version.js
+var VERSION10 = "6.1.2";
 
-// src/get-team-data.ts
-var getPullRequestQuery = (
-  /* GraphQL */
-  `
-  query getTeamData($orgLogin: String!, $teamSlug: String!, $cursor: String) {
-    organization(login: $orgLogin) {
-      team(slug: $teamSlug) {
-        members(first: 100, after: $cursor) {
-          nodes {
-            login
-          }
-          pageInfo {
-            hasNextPage
-            endCursor
-          }
+// node_modules/@octokit/action/node_modules/@octokit/core/dist-src/index.js
+var noop2 = () => {
+};
+var consoleWarn2 = console.warn.bind(console);
+var consoleError2 = console.error.bind(console);
+var userAgentTrail2 = `octokit-core.js/${VERSION10} ${getUserAgent2()}`;
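+// Core client class: wires request/graphql defaults, logging, the hook collection,
+// token or custom auth strategies, and the static plugin/defaults helpers.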
+var Octokit2 = class {
+  static VERSION = VERSION10;
+  static defaults(defaults2) {
+    const OctokitWithDefaults = class extends this {
+      constructor(...args) {
+        const options = args[0] || {};
+        if (typeof defaults2 === "function") {
+          super(defaults2(options));
+          return;
         }
+        super(
+          Object.assign(
+            {},
+            defaults2,
+            options,
+            options.userAgent && defaults2.userAgent ? {
+              userAgent: `${options.userAgent} ${defaults2.userAgent}`
+            } : null
+          )
+        );
+      }
+    };
+    return OctokitWithDefaults;
+  }
+  static plugins = [];
+  /**
+   * Attach a plugin (or many) to your Octokit instance.
+   *
+   * @example
+   * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+   */
+  static plugin(...newPlugins) {
+    const currentPlugins = this.plugins;
+    const NewOctokit = class extends this {
+      static plugins = currentPlugins.concat(
+        newPlugins.filter((plugin) => !currentPlugins.includes(plugin))
+      );
+    };
+    return NewOctokit;
+  }
+  constructor(options = {}) {
+    const hook4 = new before_after_hook_default.Collection();
+    const requestDefaults = {
+      baseUrl: request2.endpoint.DEFAULTS.baseUrl,
+      headers: {},
+      request: Object.assign({}, options.request, {
+        // @ts-ignore internal usage only, no need to type
+        hook: hook4.bind(null, "request")
+      }),
+      mediaType: {
+        previews: [],
+        format: ""
+      }
+    };
+    requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail2}` : userAgentTrail2;
+    if (options.baseUrl) {
+      requestDefaults.baseUrl = options.baseUrl;
+    }
+    if (options.previews) {
+      requestDefaults.mediaType.previews = options.previews;
+    }
+    if (options.timeZone) {
+      requestDefaults.headers["time-zone"] = options.timeZone;
+    }
+    this.request = request2.defaults(requestDefaults);
+    this.graphql = withCustomRequest2(this.request).defaults(requestDefaults);
+    this.log = Object.assign(
+      {
+        debug: noop2,
+        info: noop2,
+        warn: consoleWarn2,
+        error: consoleError2
+      },
+      options.log
+    );
+    this.hook = hook4;
+    if (!options.authStrategy) {
+      if (!options.auth) {
+        this.auth = async () => ({
+          type: "unauthenticated"
+        });
+      } else {
+        const auth4 = createTokenAuth3(options.auth);
+        hook4.wrap("request", auth4.hook);
+        this.auth = auth4;
       }
+    } else {
+      const { authStrategy, ...otherOptions } = options;
+      const auth4 = authStrategy(
+        Object.assign(
+          {
+            request: this.request,
+            log: this.log,
+            // we pass the current octokit instance as well as its constructor options
+            // to allow for authentication strategies that return a new octokit instance
+            // that shares the same internal state as the current one. The original
+            // requirement for this was the "event-octokit" authentication strategy
+            // of https://github.com/probot/octokit-auth-probot.
+            octokit: this,
+            octokitOptions: otherOptions
+          },
+          options.auth
+        )
+      );
+      hook4.wrap("request", auth4.hook);
+      this.auth = auth4;
+    }
+    const classConstructor = this.constructor;
+    for (let i2 = 0; i2 < classConstructor.plugins.length; ++i2) {
+      Object.assign(this, classConstructor.plugins[i2](this, options));
     }
   }
-`
-);
-var getTeamData = async ({
-  octokit,
-  organizationLogin,
-  teamSlug
-}) => {
-  const { organization } = await octokit.graphql.paginate(getPullRequestQuery, {
-    orgLogin: organizationLogin,
-    teamSlug
-  }).catch((e2) => {
-    console.error(
-      "Something went wrong during fetching team members data. Make sure that the github token has read access to organization members."
+  // assigned during constructor
+  request;
+  graphql;
+  log;
+  hook;
+  // TODO: type `octokit.auth` based on passed options.authStrategy
+  auth;
+};
+
+// node_modules/@octokit/auth-action/node_modules/@octokit/auth-token/dist-bundle/index.js
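+// Second vendored copy of @octokit/auth-token (same logic as auth2/hook2/createTokenAuth3
+// above), bundled via @octokit/auth-action.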
+var REGEX_IS_INSTALLATION_LEGACY3 = /^v1\./;
+var REGEX_IS_INSTALLATION3 = /^ghs_/;
+var REGEX_IS_USER_TO_SERVER3 = /^ghu_/;
+async function auth3(token) {
+  const isApp = token.split(/\./).length === 3;
+  const isInstallation = REGEX_IS_INSTALLATION_LEGACY3.test(token) || REGEX_IS_INSTALLATION3.test(token);
+  const isUserToServer = REGEX_IS_USER_TO_SERVER3.test(token);
+  const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
+  return {
+    type: "token",
+    token,
+    tokenType
+  };
+}
+function withAuthorizationPrefix3(token) {
+  if (token.split(/\./).length === 3) {
+    return `bearer ${token}`;
+  }
+  return `token ${token}`;
+}
+async function hook3(token, request3, route, parameters) {
+  const endpoint3 = request3.endpoint.merge(
+    route,
+    parameters
+  );
+  endpoint3.headers.authorization = withAuthorizationPrefix3(token);
+  return request3(endpoint3);
+}
+var createTokenAuth4 = function createTokenAuth23(token) {
+  if (!token) {
+    throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+  }
+  if (typeof token !== "string") {
+    throw new Error(
+      "[@octokit/auth-token] Token passed to createTokenAuth is not a string"
     );
-    throw e2;
+  }
+  token = token.replace(/^(token|bearer) +/i, "");
+  return Object.assign(auth3.bind(null, token), {
+    hook: hook3.bind(null, token)
   });
-  if (!organization) {
-    throw new Error(`Organization ${organization} could not be found!`);
+};
+
+// node_modules/@octokit/auth-action/dist-src/index.js
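+// Resolves the Actions token from GITHUB_TOKEN, INPUT_GITHUB_TOKEN, or INPUT_TOKEN and
+// fails if none, or more than one, is set.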
+var createActionAuth = function createActionAuth2() {
+  if (!process.env.GITHUB_ACTION) {
+    throw new Error(
+      "[@octokit/auth-action] `GITHUB_ACTION` environment variable is not set. @octokit/auth-action is meant to be used in GitHub Actions only."
+    );
   }
-  if (!organization.team) {
+  const definitions = [
+    process.env.GITHUB_TOKEN,
+    process.env.INPUT_GITHUB_TOKEN,
+    process.env.INPUT_TOKEN
+  ].filter(Boolean);
+  if (definitions.length === 0) {
     throw new Error(
-      `Team ${teamSlug} could not be found in ${organizationLogin} organization!`
+      "[@octokit/auth-action] `GITHUB_TOKEN` variable is not set. It must be set on either `env:` or `with:`. See https://github.com/octokit/auth-action.js#createactionauth"
     );
   }
-  if (!organization.team.members.nodes) {
+  if (definitions.length > 1) {
     throw new Error(
-      `Cannot read members of ${teamSlug} team in ${organizationLogin} organization!`
+      "[@octokit/auth-action] The token variable is specified more than once. Use either `with.token`, `with.GITHUB_TOKEN`, or `env.GITHUB_TOKEN`. See https://github.com/octokit/auth-action.js#createactionauth"
     );
   }
-  return {
-    members: organization.team.members.nodes.filter(isPresent).map(({ login }) => login)
-  };
+  const token = definitions.pop();
+  return createTokenAuth4(token);
 };
 
-// src/calculate-reviews-to-dismiss.ts
-var calculateReviewToDismiss = async ({
-  latestReviews,
-  headCommit,
-  baseBranch,
-  ignoreFiles,
-  octokit
-}) => {
-  const { groupedReviewsByCommit, reviewsWithoutHistory } = await groupReviewsByCommit({
-    latestReviews,
-    headCommit,
-    baseBranch,
-    ignoreFiles
-  });
-  const filesWithoutOwner = [
-    ...new Set(
-      Object.values(groupedReviewsByCommit).map(
-        ({ filesChangedByHeadCommit }) => filesChangedByHeadCommit.filter(({ owners }) => !owners.length).map(({ filename }) => filename)
-      ).flat()
-    )
-  ];
-  if (filesWithoutOwner.length) {
+// node_modules/@octokit/action/node_modules/@octokit/plugin-paginate-rest/dist-bundle/index.js
+var VERSION11 = "0.0.0-development";
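+// Unwraps search-style responses ({ total_count, <namespace>: [...] }) so that
+// response.data is always the result array, re-attaching total_count and related fields.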
+function normalizePaginatedListResponse2(response) {
+  if (!response.data) {
     return {
-      filesWithoutOwner
+      ...response,
+      data: []
     };
   }
-  const reviewsToDismiss = [...reviewsWithoutHistory];
-  const teamMembers = {};
-  for (const { filesChangedByHeadCommit, reviews } of Object.values(
-    groupedReviewsByCommit
-  )) {
-    const changedFilesOwners = [
-      ...new Set(filesChangedByHeadCommit.map(({ owners }) => owners).flat())
-    ];
-    const changedFilesTeamOwners = changedFilesOwners.filter((owner) => owner.includes("/")).map((teamOwnership) => teamOwnership.replace("@", ""));
-    await Promise.all(
-      changedFilesTeamOwners.filter((team) => !Object.keys(teamMembers).includes(team)).map(async (team) => {
-        const teamHandle = team.split("/");
-        teamMembers[team] = (await getTeamData({
-          octokit,
-          organizationLogin: teamHandle[0],
-          teamSlug: teamHandle[1]
-        })).members;
-      })
-    );
-    await Promise.all(
-      reviews.map((review) => {
-        const { author } = review;
-        let isDismissed = false;
-        console.log(
-          `Considering review from ${author?.login} and file changes between ${review.commit?.oid} (reviewed commit) and ${headCommit} (head commit)`
-        );
-        if (review.commit?.oid === headCommit) {
-          console.log(
-            "The review commit sha is the same as head commit sha. That means that there were no changes since the review, or the base branch was merged/rebased cleanly."
-          );
-        } else if (!author || // if review author is mentioned directly as an owner of changed files, dismiss their review
-        author.login && changedFilesOwners.includes(`@${author.login}`)) {
-          const changedFilesOwnedByReviewAuthor = filesChangedByHeadCommit.filter(
-            ({ owners }) => !!owners.find((owner) => owner === `@${author?.login}`)
-          ).map(({ filename }) => filename);
-          console.log(
-            `Changed files owned by ${author?.login}:
-${changedFilesOwnedByReviewAuthor.join(
-              "\n"
-            )}`
-          );
-          reviewsToDismiss.push(review);
-          isDismissed = true;
-        } else if (!changedFilesTeamOwners.length) {
-          console.log(
-            `Review author ${author?.login} doesn't own any of changed files, nor is member of any team owning changed files.
-The review from ${author?.login} won't be dismissed.
-`
-          );
-        } else {
-          for (const teamOwnership of changedFilesTeamOwners) {
-            if (teamMembers[teamOwnership]?.includes(author.login)) {
-              const changedFilesOwnedByAuthorsTeam = filesChangedByHeadCommit.filter(
-                ({ owners }) => !!owners.find((owner) => owner === `@${teamOwnership}`)
-              ).map(({ filename }) => filename);
-              console.log(
-                `Review author ${author?.login} is member of ${teamOwnership} team, which owns following changed files:
-${changedFilesOwnedByAuthorsTeam.join(
-                  "\n"
-                )}`
-              );
-              reviewsToDismiss.push(review);
-              isDismissed = true;
-            } else {
-              (0, import_core2.debug)(
-                `User ${author.login} is not member of ${teamOwnership} team`
-              );
+  const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+  if (!responseNeedsNormalization) return response;
+  const incompleteResults = response.data.incomplete_results;
+  const repositorySelection = response.data.repository_selection;
+  const totalCount = response.data.total_count;
+  delete response.data.incomplete_results;
+  delete response.data.repository_selection;
+  delete response.data.total_count;
+  const namespaceKey = Object.keys(response.data)[0];
+  const data = response.data[namespaceKey];
+  response.data = data;
+  if (typeof incompleteResults !== "undefined") {
+    response.data.incomplete_results = incompleteResults;
+  }
+  if (typeof repositorySelection !== "undefined") {
+    response.data.repository_selection = repositorySelection;
+  }
+  response.data.total_count = totalCount;
+  return response;
+}
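+// Async iterator over paginated responses: follows the rel="next" Link header until
+// exhausted; a 409 response ends iteration with an empty page.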
+function iterator2(octokit, route, parameters) {
+  const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+  const requestMethod = typeof route === "function" ? route : octokit.request;
+  const method = options.method;
+  const headers = options.headers;
+  let url = options.url;
+  return {
+    [Symbol.asyncIterator]: () => ({
+      async next() {
+        if (!url) return { done: true };
+        try {
+          const response = await requestMethod({ method, url, headers });
+          const normalizedResponse = normalizePaginatedListResponse2(response);
+          url = ((normalizedResponse.headers.link || "").match(
+            /<([^>]+)>;\s*rel="next"/
+          ) || [])[1];
+          return { value: normalizedResponse };
+        } catch (error) {
+          if (error.status !== 409) throw error;
+          url = "";
+          return {
+            value: {
+              status: 200,
+              headers: {},
+              data: []
             }
-          }
-        }
-        if (isDismissed) {
-          console.log(`The review from ${author?.login} will be dismissed.
-`);
-        } else {
-          console.log(
-            `Review author ${author?.login} doesn't own any of changed files, nor is member of any team owning changed files.
-The review from ${author?.login} won't be dismissed.
-`
-          );
+          };
         }
-      })
-    );
+      }
+    })
+  };
+}
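+// Collects every page into a single array; an optional mapFn can transform each page
+// and call done() to stop early.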
+function paginate2(octokit, route, parameters, mapFn) {
+  if (typeof parameters === "function") {
+    mapFn = parameters;
+    parameters = void 0;
   }
+  return gather2(
+    octokit,
+    [],
+    iterator2(octokit, route, parameters)[Symbol.asyncIterator](),
+    mapFn
+  );
+}
+function gather2(octokit, results, iterator22, mapFn) {
+  return iterator22.next().then((result) => {
+    if (result.done) {
+      return results;
+    }
+    let earlyExit = false;
+    function done() {
+      earlyExit = true;
+    }
+    results = results.concat(
+      mapFn ? mapFn(result.value, done) : result.value.data
+    );
+    if (earlyExit) {
+      return results;
+    }
+    return gather2(octokit, results, iterator22, mapFn);
+  });
+}
+var composePaginateRest2 = Object.assign(paginate2, {
+  iterator: iterator2
+});
+function paginateRest2(octokit) {
   return {
-    reviewsToDismiss,
-    reviewsWithoutHistory
+    paginate: Object.assign(paginate2.bind(null, octokit), {
+      iterator: iterator2.bind(null, octokit)
+    })
   };
+}
+paginateRest2.VERSION = VERSION11;
+
+// node_modules/@octokit/action/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/version.js
+var VERSION12 = "13.2.6";
+
+// node_modules/@octokit/action/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/generated/endpoints.js
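+// Generated route map for plugin-rest-endpoint-methods; each entry is
+// [route, defaults?, { renamed | renamedParameters }?].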
+var Endpoints2 = {
+  actions: {
+    addCustomLabelsToSelfHostedRunnerForOrg: [
+      "POST /orgs/{org}/actions/runners/{runner_id}/labels"
+    ],
+    addCustomLabelsToSelfHostedRunnerForRepo: [
+      "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+    ],
+    addSelectedRepoToOrgSecret: [
+      "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    addSelectedRepoToOrgVariable: [
+      "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+    ],
+    approveWorkflowRun: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"
+    ],
+    cancelWorkflowRun: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"
+    ],
+    createEnvironmentVariable: [
+      "POST /repos/{owner}/{repo}/environments/{environment_name}/variables"
+    ],
+    createOrUpdateEnvironmentSecret: [
+      "PUT /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
+    ],
+    createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+    createOrUpdateRepoSecret: [
+      "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+    ],
+    createOrgVariable: ["POST /orgs/{org}/actions/variables"],
+    createRegistrationTokenForOrg: [
+      "POST /orgs/{org}/actions/runners/registration-token"
+    ],
+    createRegistrationTokenForRepo: [
+      "POST /repos/{owner}/{repo}/actions/runners/registration-token"
+    ],
+    createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
+    createRemoveTokenForRepo: [
+      "POST /repos/{owner}/{repo}/actions/runners/remove-token"
+    ],
+    createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"],
+    createWorkflowDispatch: [
+      "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"
+    ],
+    deleteActionsCacheById: [
+      "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"
+    ],
+    deleteActionsCacheByKey: [
+      "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"
+    ],
+    deleteArtifact: [
+      "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"
+    ],
+    deleteEnvironmentSecret: [
+      "DELETE /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
+    ],
+    deleteEnvironmentVariable: [
+      "DELETE /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
+    ],
+    deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
+    deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"],
+    deleteRepoSecret: [
+      "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"
+    ],
+    deleteRepoVariable: [
+      "DELETE /repos/{owner}/{repo}/actions/variables/{name}"
+    ],
+    deleteSelfHostedRunnerFromOrg: [
+      "DELETE /orgs/{org}/actions/runners/{runner_id}"
+    ],
+    deleteSelfHostedRunnerFromRepo: [
+      "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"
+    ],
+    deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"],
+    deleteWorkflowRunLogs: [
+      "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+    ],
+    disableSelectedRepositoryGithubActionsOrganization: [
+      "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"
+    ],
+    disableWorkflow: [
+      "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"
+    ],
+    downloadArtifact: [
+      "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"
+    ],
+    downloadJobLogsForWorkflowRun: [
+      "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"
+    ],
+    downloadWorkflowRunAttemptLogs: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs"
+    ],
+    downloadWorkflowRunLogs: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"
+    ],
+    enableSelectedRepositoryGithubActionsOrganization: [
+      "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"
+    ],
+    enableWorkflow: [
+      "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"
+    ],
+    forceCancelWorkflowRun: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel"
+    ],
+    generateRunnerJitconfigForOrg: [
+      "POST /orgs/{org}/actions/runners/generate-jitconfig"
+    ],
+    generateRunnerJitconfigForRepo: [
+      "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig"
+    ],
+    getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
+    getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
+    getActionsCacheUsageByRepoForOrg: [
+      "GET /orgs/{org}/actions/cache/usage-by-repository"
+    ],
+    getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
+    getAllowedActionsOrganization: [
+      "GET /orgs/{org}/actions/permissions/selected-actions"
+    ],
+    getAllowedActionsRepository: [
+      "GET /repos/{owner}/{repo}/actions/permissions/selected-actions"
+    ],
+    getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
+    getCustomOidcSubClaimForRepo: [
+      "GET /repos/{owner}/{repo}/actions/oidc/customization/sub"
+    ],
+    getEnvironmentPublicKey: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/public-key"
+    ],
+    getEnvironmentSecret: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets/{secret_name}"
+    ],
+    getEnvironmentVariable: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
+    ],
+    getGithubActionsDefaultWorkflowPermissionsOrganization: [
+      "GET /orgs/{org}/actions/permissions/workflow"
+    ],
+    getGithubActionsDefaultWorkflowPermissionsRepository: [
+      "GET /repos/{owner}/{repo}/actions/permissions/workflow"
+    ],
+    getGithubActionsPermissionsOrganization: [
+      "GET /orgs/{org}/actions/permissions"
+    ],
+    getGithubActionsPermissionsRepository: [
+      "GET /repos/{owner}/{repo}/actions/permissions"
+    ],
+    getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
+    getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"],
+    getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"],
+    getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"],
+    getPendingDeploymentsForRun: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+    ],
+    getRepoPermissions: [
+      "GET /repos/{owner}/{repo}/actions/permissions",
+      {},
+      { renamed: ["actions", "getGithubActionsPermissionsRepository"] }
+    ],
+    getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"],
+    getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+    getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"],
+    getReviewsForRun: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"
+    ],
+    getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
+    getSelfHostedRunnerForRepo: [
+      "GET /repos/{owner}/{repo}/actions/runners/{runner_id}"
+    ],
+    getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
+    getWorkflowAccessToRepository: [
+      "GET /repos/{owner}/{repo}/actions/permissions/access"
+    ],
+    getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
+    getWorkflowRunAttempt: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"
+    ],
+    getWorkflowRunUsage: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"
+    ],
+    getWorkflowUsage: [
+      "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"
+    ],
+    listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"],
+    listEnvironmentSecrets: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/secrets"
+    ],
+    listEnvironmentVariables: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/variables"
+    ],
+    listJobsForWorkflowRun: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"
+    ],
+    listJobsForWorkflowRunAttempt: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"
+    ],
+    listLabelsForSelfHostedRunnerForOrg: [
+      "GET /orgs/{org}/actions/runners/{runner_id}/labels"
+    ],
+    listLabelsForSelfHostedRunnerForRepo: [
+      "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+    ],
+    listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
+    listOrgVariables: ["GET /orgs/{org}/actions/variables"],
+    listRepoOrganizationSecrets: [
+      "GET /repos/{owner}/{repo}/actions/organization-secrets"
+    ],
+    listRepoOrganizationVariables: [
+      "GET /repos/{owner}/{repo}/actions/organization-variables"
+    ],
+    listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
+    listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"],
+    listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
+    listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"],
+    listRunnerApplicationsForRepo: [
+      "GET /repos/{owner}/{repo}/actions/runners/downloads"
+    ],
+    listSelectedReposForOrgSecret: [
+      "GET /orgs/{org}/actions/secrets/{secret_name}/repositories"
+    ],
+    listSelectedReposForOrgVariable: [
+      "GET /orgs/{org}/actions/variables/{name}/repositories"
+    ],
+    listSelectedRepositoriesEnabledGithubActionsOrganization: [
+      "GET /orgs/{org}/actions/permissions/repositories"
+    ],
+    listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"],
+    listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"],
+    listWorkflowRunArtifacts: [
+      "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"
+    ],
+    listWorkflowRuns: [
+      "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"
+    ],
+    listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
+    reRunJobForWorkflowRun: [
+      "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"
+    ],
+    reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
+    reRunWorkflowFailedJobs: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"
+    ],
+    removeAllCustomLabelsFromSelfHostedRunnerForOrg: [
+      "DELETE /orgs/{org}/actions/runners/{runner_id}/labels"
+    ],
+    removeAllCustomLabelsFromSelfHostedRunnerForRepo: [
+      "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+    ],
+    removeCustomLabelFromSelfHostedRunnerForOrg: [
+      "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"
+    ],
+    removeCustomLabelFromSelfHostedRunnerForRepo: [
+      "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"
+    ],
+    removeSelectedRepoFromOrgSecret: [
+      "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    removeSelectedRepoFromOrgVariable: [
+      "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}"
+    ],
+    reviewCustomGatesForRun: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule"
+    ],
+    reviewPendingDeploymentsForRun: [
+      "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"
+    ],
+    setAllowedActionsOrganization: [
+      "PUT /orgs/{org}/actions/permissions/selected-actions"
+    ],
+    setAllowedActionsRepository: [
+      "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"
+    ],
+    setCustomLabelsForSelfHostedRunnerForOrg: [
+      "PUT /orgs/{org}/actions/runners/{runner_id}/labels"
+    ],
+    setCustomLabelsForSelfHostedRunnerForRepo: [
+      "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"
+    ],
+    setCustomOidcSubClaimForRepo: [
+      "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub"
+    ],
+    setGithubActionsDefaultWorkflowPermissionsOrganization: [
+      "PUT /orgs/{org}/actions/permissions/workflow"
+    ],
+    setGithubActionsDefaultWorkflowPermissionsRepository: [
+      "PUT /repos/{owner}/{repo}/actions/permissions/workflow"
+    ],
+    setGithubActionsPermissionsOrganization: [
+      "PUT /orgs/{org}/actions/permissions"
+    ],
+    setGithubActionsPermissionsRepository: [
+      "PUT /repos/{owner}/{repo}/actions/permissions"
+    ],
+    setSelectedReposForOrgSecret: [
+      "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"
+    ],
+    setSelectedReposForOrgVariable: [
+      "PUT /orgs/{org}/actions/variables/{name}/repositories"
+    ],
+    setSelectedRepositoriesEnabledGithubActionsOrganization: [
+      "PUT /orgs/{org}/actions/permissions/repositories"
+    ],
+    setWorkflowAccessToRepository: [
+      "PUT /repos/{owner}/{repo}/actions/permissions/access"
+    ],
+    updateEnvironmentVariable: [
+      "PATCH /repos/{owner}/{repo}/environments/{environment_name}/variables/{name}"
+    ],
+    updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"],
+    updateRepoVariable: [
+      "PATCH /repos/{owner}/{repo}/actions/variables/{name}"
+    ]
+  },
+  activity: {
+    checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
+    deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"],
+    deleteThreadSubscription: [
+      "DELETE /notifications/threads/{thread_id}/subscription"
+    ],
+    getFeeds: ["GET /feeds"],
+    getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"],
+    getThread: ["GET /notifications/threads/{thread_id}"],
+    getThreadSubscriptionForAuthenticatedUser: [
+      "GET /notifications/threads/{thread_id}/subscription"
+    ],
+    listEventsForAuthenticatedUser: ["GET /users/{username}/events"],
+    listNotificationsForAuthenticatedUser: ["GET /notifications"],
+    listOrgEventsForAuthenticatedUser: [
+      "GET /users/{username}/events/orgs/{org}"
+    ],
+    listPublicEvents: ["GET /events"],
+    listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"],
+    listPublicEventsForUser: ["GET /users/{username}/events/public"],
+    listPublicOrgEvents: ["GET /orgs/{org}/events"],
+    listReceivedEventsForUser: ["GET /users/{username}/received_events"],
+    listReceivedPublicEventsForUser: [
+      "GET /users/{username}/received_events/public"
+    ],
+    listRepoEvents: ["GET /repos/{owner}/{repo}/events"],
+    listRepoNotificationsForAuthenticatedUser: [
+      "GET /repos/{owner}/{repo}/notifications"
+    ],
+    listReposStarredByAuthenticatedUser: ["GET /user/starred"],
+    listReposStarredByUser: ["GET /users/{username}/starred"],
+    listReposWatchedByUser: ["GET /users/{username}/subscriptions"],
+    listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"],
+    listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"],
+    listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"],
+    markNotificationsAsRead: ["PUT /notifications"],
+    markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"],
+    markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"],
+    markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"],
+    setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"],
+    setThreadSubscription: [
+      "PUT /notifications/threads/{thread_id}/subscription"
+    ],
+    starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"],
+    unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"]
+  },
+  apps: {
+    addRepoToInstallation: [
+      "PUT /user/installations/{installation_id}/repositories/{repository_id}",
+      {},
+      { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] }
+    ],
+    addRepoToInstallationForAuthenticatedUser: [
+      "PUT /user/installations/{installation_id}/repositories/{repository_id}"
+    ],
+    checkToken: ["POST /applications/{client_id}/token"],
+    createFromManifest: ["POST /app-manifests/{code}/conversions"],
+    createInstallationAccessToken: [
+      "POST /app/installations/{installation_id}/access_tokens"
+    ],
+    deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
+    deleteInstallation: ["DELETE /app/installations/{installation_id}"],
+    deleteToken: ["DELETE /applications/{client_id}/token"],
+    getAuthenticated: ["GET /app"],
+    getBySlug: ["GET /apps/{app_slug}"],
+    getInstallation: ["GET /app/installations/{installation_id}"],
+    getOrgInstallation: ["GET /orgs/{org}/installation"],
+    getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"],
+    getSubscriptionPlanForAccount: [
+      "GET /marketplace_listing/accounts/{account_id}"
+    ],
+    getSubscriptionPlanForAccountStubbed: [
+      "GET /marketplace_listing/stubbed/accounts/{account_id}"
+    ],
+    getUserInstallation: ["GET /users/{username}/installation"],
+    getWebhookConfigForApp: ["GET /app/hook/config"],
+    getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"],
+    listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"],
+    listAccountsForPlanStubbed: [
+      "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"
+    ],
+    listInstallationReposForAuthenticatedUser: [
+      "GET /user/installations/{installation_id}/repositories"
+    ],
+    listInstallationRequestsForAuthenticatedApp: [
+      "GET /app/installation-requests"
+    ],
+    listInstallations: ["GET /app/installations"],
+    listInstallationsForAuthenticatedUser: ["GET /user/installations"],
+    listPlans: ["GET /marketplace_listing/plans"],
+    listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"],
+    listReposAccessibleToInstallation: ["GET /installation/repositories"],
+    listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"],
+    listSubscriptionsForAuthenticatedUserStubbed: [
+      "GET /user/marketplace_purchases/stubbed"
+    ],
+    listWebhookDeliveries: ["GET /app/hook/deliveries"],
+    redeliverWebhookDelivery: [
+      "POST /app/hook/deliveries/{delivery_id}/attempts"
+    ],
+    removeRepoFromInstallation: [
+      "DELETE /user/installations/{installation_id}/repositories/{repository_id}",
+      {},
+      { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] }
+    ],
+    removeRepoFromInstallationForAuthenticatedUser: [
+      "DELETE /user/installations/{installation_id}/repositories/{repository_id}"
+    ],
+    resetToken: ["PATCH /applications/{client_id}/token"],
+    revokeInstallationAccessToken: ["DELETE /installation/token"],
+    scopeToken: ["POST /applications/{client_id}/token/scoped"],
+    suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"],
+    unsuspendInstallation: [
+      "DELETE /app/installations/{installation_id}/suspended"
+    ],
+    updateWebhookConfigForApp: ["PATCH /app/hook/config"]
+  },
+  billing: {
+    getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
+    getGithubActionsBillingUser: [
+      "GET /users/{username}/settings/billing/actions"
+    ],
+    getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
+    getGithubPackagesBillingUser: [
+      "GET /users/{username}/settings/billing/packages"
+    ],
+    getSharedStorageBillingOrg: [
+      "GET /orgs/{org}/settings/billing/shared-storage"
+    ],
+    getSharedStorageBillingUser: [
+      "GET /users/{username}/settings/billing/shared-storage"
+    ]
+  },
+  checks: {
+    create: ["POST /repos/{owner}/{repo}/check-runs"],
+    createSuite: ["POST /repos/{owner}/{repo}/check-suites"],
+    get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"],
+    getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"],
+    listAnnotations: [
+      "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"
+    ],
+    listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"],
+    listForSuite: [
+      "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"
+    ],
+    listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"],
+    rerequestRun: [
+      "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest"
+    ],
+    rerequestSuite: [
+      "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"
+    ],
+    setSuitesPreferences: [
+      "PATCH /repos/{owner}/{repo}/check-suites/preferences"
+    ],
+    update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"]
+  },
+  codeScanning: {
+    deleteAnalysis: [
+      "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"
+    ],
+    getAlert: [
+      "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}",
+      {},
+      { renamedParameters: { alert_id: "alert_number" } }
+    ],
+    getAnalysis: [
+      "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"
+    ],
+    getCodeqlDatabase: [
+      "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}"
+    ],
+    getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"],
+    getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
+    listAlertInstances: [
+      "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"
+    ],
+    listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
+    listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
+    listAlertsInstances: [
+      "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances",
+      {},
+      { renamed: ["codeScanning", "listAlertInstances"] }
+    ],
+    listCodeqlDatabases: [
+      "GET /repos/{owner}/{repo}/code-scanning/codeql/databases"
+    ],
+    listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"],
+    updateAlert: [
+      "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"
+    ],
+    updateDefaultSetup: [
+      "PATCH /repos/{owner}/{repo}/code-scanning/default-setup"
+    ],
+    uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"]
+  },
+  codesOfConduct: {
+    getAllCodesOfConduct: ["GET /codes_of_conduct"],
+    getConductCode: ["GET /codes_of_conduct/{key}"]
+  },
+  codespaces: {
+    addRepositoryForSecretForAuthenticatedUser: [
+      "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    addSelectedRepoToOrgSecret: [
+      "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    checkPermissionsForDevcontainer: [
+      "GET /repos/{owner}/{repo}/codespaces/permissions_check"
+    ],
+    codespaceMachinesForAuthenticatedUser: [
+      "GET /user/codespaces/{codespace_name}/machines"
+    ],
+    createForAuthenticatedUser: ["POST /user/codespaces"],
+    createOrUpdateOrgSecret: [
+      "PUT /orgs/{org}/codespaces/secrets/{secret_name}"
+    ],
+    createOrUpdateRepoSecret: [
+      "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+    ],
+    createOrUpdateSecretForAuthenticatedUser: [
+      "PUT /user/codespaces/secrets/{secret_name}"
+    ],
+    createWithPrForAuthenticatedUser: [
+      "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"
+    ],
+    createWithRepoForAuthenticatedUser: [
+      "POST /repos/{owner}/{repo}/codespaces"
+    ],
+    deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
+    deleteFromOrganization: [
+      "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"
+    ],
+    deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"],
+    deleteRepoSecret: [
+      "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+    ],
+    deleteSecretForAuthenticatedUser: [
+      "DELETE /user/codespaces/secrets/{secret_name}"
+    ],
+    exportForAuthenticatedUser: [
+      "POST /user/codespaces/{codespace_name}/exports"
+    ],
+    getCodespacesForUserInOrg: [
+      "GET /orgs/{org}/members/{username}/codespaces"
+    ],
+    getExportDetailsForAuthenticatedUser: [
+      "GET /user/codespaces/{codespace_name}/exports/{export_id}"
+    ],
+    getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
+    getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"],
+    getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"],
+    getPublicKeyForAuthenticatedUser: [
+      "GET /user/codespaces/secrets/public-key"
+    ],
+    getRepoPublicKey: [
+      "GET /repos/{owner}/{repo}/codespaces/secrets/public-key"
+    ],
+    getRepoSecret: [
+      "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"
+    ],
+    getSecretForAuthenticatedUser: [
+      "GET /user/codespaces/secrets/{secret_name}"
+    ],
+    listDevcontainersInRepositoryForAuthenticatedUser: [
+      "GET /repos/{owner}/{repo}/codespaces/devcontainers"
+    ],
+    listForAuthenticatedUser: ["GET /user/codespaces"],
+    listInOrganization: [
+      "GET /orgs/{org}/codespaces",
+      {},
+      { renamedParameters: { org_id: "org" } }
+    ],
+    listInRepositoryForAuthenticatedUser: [
+      "GET /repos/{owner}/{repo}/codespaces"
+    ],
+    listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"],
+    listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
+    listRepositoriesForSecretForAuthenticatedUser: [
+      "GET /user/codespaces/secrets/{secret_name}/repositories"
+    ],
+    listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
+    listSelectedReposForOrgSecret: [
+      "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+    ],
+    preFlightWithRepoForAuthenticatedUser: [
+      "GET /repos/{owner}/{repo}/codespaces/new"
+    ],
+    publishForAuthenticatedUser: [
+      "POST /user/codespaces/{codespace_name}/publish"
+    ],
+    removeRepositoryForSecretForAuthenticatedUser: [
+      "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    removeSelectedRepoFromOrgSecret: [
+      "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    repoMachinesForAuthenticatedUser: [
+      "GET /repos/{owner}/{repo}/codespaces/machines"
+    ],
+    setRepositoriesForSecretForAuthenticatedUser: [
+      "PUT /user/codespaces/secrets/{secret_name}/repositories"
+    ],
+    setSelectedReposForOrgSecret: [
+      "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories"
+    ],
+    startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
+    stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
+    stopInOrganization: [
+      "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"
+    ],
+    updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
+  },
+  copilot: {
+    addCopilotSeatsForTeams: [
+      "POST /orgs/{org}/copilot/billing/selected_teams"
+    ],
+    addCopilotSeatsForUsers: [
+      "POST /orgs/{org}/copilot/billing/selected_users"
+    ],
+    cancelCopilotSeatAssignmentForTeams: [
+      "DELETE /orgs/{org}/copilot/billing/selected_teams"
+    ],
+    cancelCopilotSeatAssignmentForUsers: [
+      "DELETE /orgs/{org}/copilot/billing/selected_users"
+    ],
+    getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"],
+    getCopilotSeatDetailsForUser: [
+      "GET /orgs/{org}/members/{username}/copilot"
+    ],
+    listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"],
+    usageMetricsForEnterprise: ["GET /enterprises/{enterprise}/copilot/usage"],
+    usageMetricsForOrg: ["GET /orgs/{org}/copilot/usage"],
+    usageMetricsForTeam: ["GET /orgs/{org}/team/{team_slug}/copilot/usage"]
+  },
+  dependabot: {
+    addSelectedRepoToOrgSecret: [
+      "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    createOrUpdateOrgSecret: [
+      "PUT /orgs/{org}/dependabot/secrets/{secret_name}"
+    ],
+    createOrUpdateRepoSecret: [
+      "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+    ],
+    deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
+    deleteRepoSecret: [
+      "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+    ],
+    getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"],
+    getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
+    getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
+    getRepoPublicKey: [
+      "GET /repos/{owner}/{repo}/dependabot/secrets/public-key"
+    ],
+    getRepoSecret: [
+      "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"
+    ],
+    listAlertsForEnterprise: [
+      "GET /enterprises/{enterprise}/dependabot/alerts"
+    ],
+    listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"],
+    listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"],
+    listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
+    listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
+    listSelectedReposForOrgSecret: [
+      "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+    ],
+    removeSelectedRepoFromOrgSecret: [
+      "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"
+    ],
+    setSelectedReposForOrgSecret: [
+      "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"
+    ],
+    updateAlert: [
+      "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"
+    ]
+  },
+  dependencyGraph: {
+    createRepositorySnapshot: [
+      "POST /repos/{owner}/{repo}/dependency-graph/snapshots"
+    ],
+    diffRange: [
+      "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"
+    ],
+    exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"]
+  },
+  emojis: { get: ["GET /emojis"] },
+  gists: {
+    checkIsStarred: ["GET /gists/{gist_id}/star"],
+    create: ["POST /gists"],
+    createComment: ["POST /gists/{gist_id}/comments"],
+    delete: ["DELETE /gists/{gist_id}"],
+    deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"],
+    fork: ["POST /gists/{gist_id}/forks"],
+    get: ["GET /gists/{gist_id}"],
+    getComment: ["GET /gists/{gist_id}/comments/{comment_id}"],
+    getRevision: ["GET /gists/{gist_id}/{sha}"],
+    list: ["GET /gists"],
+    listComments: ["GET /gists/{gist_id}/comments"],
+    listCommits: ["GET /gists/{gist_id}/commits"],
+    listForUser: ["GET /users/{username}/gists"],
+    listForks: ["GET /gists/{gist_id}/forks"],
+    listPublic: ["GET /gists/public"],
+    listStarred: ["GET /gists/starred"],
+    star: ["PUT /gists/{gist_id}/star"],
+    unstar: ["DELETE /gists/{gist_id}/star"],
+    update: ["PATCH /gists/{gist_id}"],
+    updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"]
+  },
+  git: {
+    createBlob: ["POST /repos/{owner}/{repo}/git/blobs"],
+    createCommit: ["POST /repos/{owner}/{repo}/git/commits"],
+    createRef: ["POST /repos/{owner}/{repo}/git/refs"],
+    createTag: ["POST /repos/{owner}/{repo}/git/tags"],
+    createTree: ["POST /repos/{owner}/{repo}/git/trees"],
+    deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"],
+    getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"],
+    getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"],
+    getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"],
+    getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"],
+    getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"],
+    listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"],
+    updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"]
+  },
+  gitignore: {
+    getAllTemplates: ["GET /gitignore/templates"],
+    getTemplate: ["GET /gitignore/templates/{name}"]
+  },
+  interactions: {
+    getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"],
+    getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"],
+    getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"],
+    getRestrictionsForYourPublicRepos: [
+      "GET /user/interaction-limits",
+      {},
+      { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] }
+    ],
+    removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"],
+    removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"],
+    removeRestrictionsForRepo: [
+      "DELETE /repos/{owner}/{repo}/interaction-limits"
+    ],
+    removeRestrictionsForYourPublicRepos: [
+      "DELETE /user/interaction-limits",
+      {},
+      { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] }
+    ],
+    setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"],
+    setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"],
+    setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"],
+    setRestrictionsForYourPublicRepos: [
+      "PUT /user/interaction-limits",
+      {},
+      { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] }
+    ]
+  },
+  issues: {
+    addAssignees: [
+      "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+    ],
+    addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+    checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"],
+    checkUserCanBeAssignedToIssue: [
+      "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}"
+    ],
+    create: ["POST /repos/{owner}/{repo}/issues"],
+    createComment: [
+      "POST /repos/{owner}/{repo}/issues/{issue_number}/comments"
+    ],
+    createLabel: ["POST /repos/{owner}/{repo}/labels"],
+    createMilestone: ["POST /repos/{owner}/{repo}/milestones"],
+    deleteComment: [
+      "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"
+    ],
+    deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"],
+    deleteMilestone: [
+      "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"
+    ],
+    get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"],
+    getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+    getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"],
+    getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"],
+    getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"],
+    list: ["GET /issues"],
+    listAssignees: ["GET /repos/{owner}/{repo}/assignees"],
+    listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"],
+    listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"],
+    listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"],
+    listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"],
+    listEventsForTimeline: [
+      "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline"
+    ],
+    listForAuthenticatedUser: ["GET /user/issues"],
+    listForOrg: ["GET /orgs/{org}/issues"],
+    listForRepo: ["GET /repos/{owner}/{repo}/issues"],
+    listLabelsForMilestone: [
+      "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"
+    ],
+    listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"],
+    listLabelsOnIssue: [
+      "GET /repos/{owner}/{repo}/issues/{issue_number}/labels"
+    ],
+    listMilestones: ["GET /repos/{owner}/{repo}/milestones"],
+    lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+    removeAllLabels: [
+      "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"
+    ],
+    removeAssignees: [
+      "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"
+    ],
+    removeLabel: [
+      "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"
+    ],
+    setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"],
+    unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"],
+    update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"],
+    updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"],
+    updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"],
+    updateMilestone: [
+      "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"
+    ]
+  },
+  licenses: {
+    get: ["GET /licenses/{license}"],
+    getAllCommonlyUsed: ["GET /licenses"],
+    getForRepo: ["GET /repos/{owner}/{repo}/license"]
+  },
+  markdown: {
+    render: ["POST /markdown"],
+    renderRaw: [
+      "POST /markdown/raw",
+      { headers: { "content-type": "text/plain; charset=utf-8" } }
+    ]
+  },
+  meta: {
+    get: ["GET /meta"],
+    getAllVersions: ["GET /versions"],
+    getOctocat: ["GET /octocat"],
+    getZen: ["GET /zen"],
+    root: ["GET /"]
+  },
+  migrations: {
+    deleteArchiveForAuthenticatedUser: [
+      "DELETE /user/migrations/{migration_id}/archive"
+    ],
+    deleteArchiveForOrg: [
+      "DELETE /orgs/{org}/migrations/{migration_id}/archive"
+    ],
+    downloadArchiveForOrg: [
+      "GET /orgs/{org}/migrations/{migration_id}/archive"
+    ],
+    getArchiveForAuthenticatedUser: [
+      "GET /user/migrations/{migration_id}/archive"
+    ],
+    getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"],
+    getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"],
+    listForAuthenticatedUser: ["GET /user/migrations"],
+    listForOrg: ["GET /orgs/{org}/migrations"],
+    listReposForAuthenticatedUser: [
+      "GET /user/migrations/{migration_id}/repositories"
+    ],
+    listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"],
+    listReposForUser: [
+      "GET /user/migrations/{migration_id}/repositories",
+      {},
+      { renamed: ["migrations", "listReposForAuthenticatedUser"] }
+    ],
+    startForAuthenticatedUser: ["POST /user/migrations"],
+    startForOrg: ["POST /orgs/{org}/migrations"],
+    unlockRepoForAuthenticatedUser: [
+      "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock"
+    ],
+    unlockRepoForOrg: [
+      "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
+    ]
+  },
+  oidc: {
+    getOidcCustomSubTemplateForOrg: [
+      "GET /orgs/{org}/actions/oidc/customization/sub"
+    ],
+    updateOidcCustomSubTemplateForOrg: [
+      "PUT /orgs/{org}/actions/oidc/customization/sub"
+    ]
+  },
+  orgs: {
+    addSecurityManagerTeam: [
+      "PUT /orgs/{org}/security-managers/teams/{team_slug}"
+    ],
+    assignTeamToOrgRole: [
+      "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
+    ],
+    assignUserToOrgRole: [
+      "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}"
+    ],
+    blockUser: ["PUT /orgs/{org}/blocks/{username}"],
+    cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"],
+    checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"],
+    checkMembershipForUser: ["GET /orgs/{org}/members/{username}"],
+    checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"],
+    convertMemberToOutsideCollaborator: [
+      "PUT /orgs/{org}/outside_collaborators/{username}"
+    ],
+    createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"],
+    createInvitation: ["POST /orgs/{org}/invitations"],
+    createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"],
+    createOrUpdateCustomPropertiesValuesForRepos: [
+      "PATCH /orgs/{org}/properties/values"
+    ],
+    createOrUpdateCustomProperty: [
+      "PUT /orgs/{org}/properties/schema/{custom_property_name}"
+    ],
+    createWebhook: ["POST /orgs/{org}/hooks"],
+    delete: ["DELETE /orgs/{org}"],
+    deleteCustomOrganizationRole: [
+      "DELETE /orgs/{org}/organization-roles/{role_id}"
+    ],
+    deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"],
+    enableOrDisableSecurityProductOnAllOrgRepos: [
+      "POST /orgs/{org}/{security_product}/{enablement}"
+    ],
+    get: ["GET /orgs/{org}"],
+    getAllCustomProperties: ["GET /orgs/{org}/properties/schema"],
+    getCustomProperty: [
+      "GET /orgs/{org}/properties/schema/{custom_property_name}"
+    ],
+    getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"],
+    getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"],
+    getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"],
+    getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"],
+    getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"],
+    getWebhookDelivery: [
+      "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}"
+    ],
+    list: ["GET /organizations"],
+    listAppInstallations: ["GET /orgs/{org}/installations"],
+    listBlockedUsers: ["GET /orgs/{org}/blocks"],
+    listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"],
+    listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
+    listForAuthenticatedUser: ["GET /user/orgs"],
+    listForUser: ["GET /users/{username}/orgs"],
+    listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"],
+    listMembers: ["GET /orgs/{org}/members"],
+    listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"],
+    listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"],
+    listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"],
+    listOrgRoles: ["GET /orgs/{org}/organization-roles"],
+    listOrganizationFineGrainedPermissions: [
+      "GET /orgs/{org}/organization-fine-grained-permissions"
+    ],
+    listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"],
+    listPatGrantRepositories: [
+      "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories"
+    ],
+    listPatGrantRequestRepositories: [
+      "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories"
+    ],
+    listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"],
+    listPatGrants: ["GET /orgs/{org}/personal-access-tokens"],
+    listPendingInvitations: ["GET /orgs/{org}/invitations"],
+    listPublicMembers: ["GET /orgs/{org}/public_members"],
+    listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"],
+    listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"],
+    listWebhooks: ["GET /orgs/{org}/hooks"],
+    patchCustomOrganizationRole: [
+      "PATCH /orgs/{org}/organization-roles/{role_id}"
+    ],
+    pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"],
+    redeliverWebhookDelivery: [
+      "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+    ],
+    removeCustomProperty: [
+      "DELETE /orgs/{org}/properties/schema/{custom_property_name}"
+    ],
+    removeMember: ["DELETE /orgs/{org}/members/{username}"],
+    removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"],
+    removeOutsideCollaborator: [
+      "DELETE /orgs/{org}/outside_collaborators/{username}"
+    ],
+    removePublicMembershipForAuthenticatedUser: [
+      "DELETE /orgs/{org}/public_members/{username}"
+    ],
+    removeSecurityManagerTeam: [
+      "DELETE /orgs/{org}/security-managers/teams/{team_slug}"
+    ],
+    reviewPatGrantRequest: [
+      "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}"
+    ],
+    reviewPatGrantRequestsInBulk: [
+      "POST /orgs/{org}/personal-access-token-requests"
+    ],
+    revokeAllOrgRolesTeam: [
+      "DELETE /orgs/{org}/organization-roles/teams/{team_slug}"
+    ],
+    revokeAllOrgRolesUser: [
+      "DELETE /orgs/{org}/organization-roles/users/{username}"
+    ],
+    revokeOrgRoleTeam: [
+      "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}"
+    ],
+    revokeOrgRoleUser: [
+      "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}"
+    ],
+    setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"],
+    setPublicMembershipForAuthenticatedUser: [
+      "PUT /orgs/{org}/public_members/{username}"
+    ],
+    unblockUser: ["DELETE /orgs/{org}/blocks/{username}"],
+    update: ["PATCH /orgs/{org}"],
+    updateMembershipForAuthenticatedUser: [
+      "PATCH /user/memberships/orgs/{org}"
+    ],
+    updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"],
+    updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"],
+    updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"],
+    updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"]
+  },
+  packages: {
+    deletePackageForAuthenticatedUser: [
+      "DELETE /user/packages/{package_type}/{package_name}"
+    ],
+    deletePackageForOrg: [
+      "DELETE /orgs/{org}/packages/{package_type}/{package_name}"
+    ],
+    deletePackageForUser: [
+      "DELETE /users/{username}/packages/{package_type}/{package_name}"
+    ],
+    deletePackageVersionForAuthenticatedUser: [
+      "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    deletePackageVersionForOrg: [
+      "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    deletePackageVersionForUser: [
+      "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    getAllPackageVersionsForAPackageOwnedByAnOrg: [
+      "GET /orgs/{org}/packages/{package_type}/{package_name}/versions",
+      {},
+      { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] }
+    ],
+    getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [
+      "GET /user/packages/{package_type}/{package_name}/versions",
+      {},
+      {
+        renamed: [
+          "packages",
+          "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"
+        ]
+      }
+    ],
+    getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [
+      "GET /user/packages/{package_type}/{package_name}/versions"
+    ],
+    getAllPackageVersionsForPackageOwnedByOrg: [
+      "GET /orgs/{org}/packages/{package_type}/{package_name}/versions"
+    ],
+    getAllPackageVersionsForPackageOwnedByUser: [
+      "GET /users/{username}/packages/{package_type}/{package_name}/versions"
+    ],
+    getPackageForAuthenticatedUser: [
+      "GET /user/packages/{package_type}/{package_name}"
+    ],
+    getPackageForOrganization: [
+      "GET /orgs/{org}/packages/{package_type}/{package_name}"
+    ],
+    getPackageForUser: [
+      "GET /users/{username}/packages/{package_type}/{package_name}"
+    ],
+    getPackageVersionForAuthenticatedUser: [
+      "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    getPackageVersionForOrganization: [
+      "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    getPackageVersionForUser: [
+      "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"
+    ],
+    listDockerMigrationConflictingPackagesForAuthenticatedUser: [
+      "GET /user/docker/conflicts"
+    ],
+    listDockerMigrationConflictingPackagesForOrganization: [
+      "GET /orgs/{org}/docker/conflicts"
+    ],
+    listDockerMigrationConflictingPackagesForUser: [
+      "GET /users/{username}/docker/conflicts"
+    ],
+    listPackagesForAuthenticatedUser: ["GET /user/packages"],
+    listPackagesForOrganization: ["GET /orgs/{org}/packages"],
+    listPackagesForUser: ["GET /users/{username}/packages"],
+    restorePackageForAuthenticatedUser: [
+      "POST /user/packages/{package_type}/{package_name}/restore{?token}"
+    ],
+    restorePackageForOrg: [
+      "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"
+    ],
+    restorePackageForUser: [
+      "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}"
+    ],
+    restorePackageVersionForAuthenticatedUser: [
+      "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+    ],
+    restorePackageVersionForOrg: [
+      "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+    ],
+    restorePackageVersionForUser: [
+      "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"
+    ]
+  },
+  projects: {
+    addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"],
+    createCard: ["POST /projects/columns/{column_id}/cards"],
+    createColumn: ["POST /projects/{project_id}/columns"],
+    createForAuthenticatedUser: ["POST /user/projects"],
+    createForOrg: ["POST /orgs/{org}/projects"],
+    createForRepo: ["POST /repos/{owner}/{repo}/projects"],
+    delete: ["DELETE /projects/{project_id}"],
+    deleteCard: ["DELETE /projects/columns/cards/{card_id}"],
+    deleteColumn: ["DELETE /projects/columns/{column_id}"],
+    get: ["GET /projects/{project_id}"],
+    getCard: ["GET /projects/columns/cards/{card_id}"],
+    getColumn: ["GET /projects/columns/{column_id}"],
+    getPermissionForUser: [
+      "GET /projects/{project_id}/collaborators/{username}/permission"
+    ],
+    listCards: ["GET /projects/columns/{column_id}/cards"],
+    listCollaborators: ["GET /projects/{project_id}/collaborators"],
+    listColumns: ["GET /projects/{project_id}/columns"],
+    listForOrg: ["GET /orgs/{org}/projects"],
+    listForRepo: ["GET /repos/{owner}/{repo}/projects"],
+    listForUser: ["GET /users/{username}/projects"],
+    moveCard: ["POST /projects/columns/cards/{card_id}/moves"],
+    moveColumn: ["POST /projects/columns/{column_id}/moves"],
+    removeCollaborator: [
+      "DELETE /projects/{project_id}/collaborators/{username}"
+    ],
+    update: ["PATCH /projects/{project_id}"],
+    updateCard: ["PATCH /projects/columns/cards/{card_id}"],
+    updateColumn: ["PATCH /projects/columns/{column_id}"]
+  },
+  pulls: {
+    checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+    create: ["POST /repos/{owner}/{repo}/pulls"],
+    createReplyForReviewComment: [
+      "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"
+    ],
+    createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+    createReviewComment: [
+      "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+    ],
+    deletePendingReview: [
+      "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+    ],
+    deleteReviewComment: [
+      "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+    ],
+    dismissReview: [
+      "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"
+    ],
+    get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"],
+    getReview: [
+      "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+    ],
+    getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"],
+    list: ["GET /repos/{owner}/{repo}/pulls"],
+    listCommentsForReview: [
+      "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"
+    ],
+    listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"],
+    listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"],
+    listRequestedReviewers: [
+      "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+    ],
+    listReviewComments: [
+      "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"
+    ],
+    listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"],
+    listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"],
+    merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"],
+    removeRequestedReviewers: [
+      "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+    ],
+    requestReviewers: [
+      "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"
+    ],
+    submitReview: [
+      "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"
+    ],
+    update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"],
+    updateBranch: [
+      "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch"
+    ],
+    updateReview: [
+      "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"
+    ],
+    updateReviewComment: [
+      "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"
+    ]
+  },
+  rateLimit: { get: ["GET /rate_limit"] },
+  reactions: {
+    createForCommitComment: [
+      "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+    ],
+    createForIssue: [
+      "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions"
+    ],
+    createForIssueComment: [
+      "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+    ],
+    createForPullRequestReviewComment: [
+      "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+    ],
+    createForRelease: [
+      "POST /repos/{owner}/{repo}/releases/{release_id}/reactions"
+    ],
+    createForTeamDiscussionCommentInOrg: [
+      "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+    ],
+    createForTeamDiscussionInOrg: [
+      "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+    ],
+    deleteForCommitComment: [
+      "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}"
+    ],
+    deleteForIssue: [
+      "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"
+    ],
+    deleteForIssueComment: [
+      "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"
+    ],
+    deleteForPullRequestComment: [
+      "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"
+    ],
+    deleteForRelease: [
+      "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"
+    ],
+    deleteForTeamDiscussion: [
+      "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"
+    ],
+    deleteForTeamDiscussionComment: [
+      "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"
+    ],
+    listForCommitComment: [
+      "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"
+    ],
+    listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
+    listForIssueComment: [
+      "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"
+    ],
+    listForPullRequestReviewComment: [
+      "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"
+    ],
+    listForRelease: [
+      "GET /repos/{owner}/{repo}/releases/{release_id}/reactions"
+    ],
+    listForTeamDiscussionCommentInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"
+    ],
+    listForTeamDiscussionInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"
+    ]
+  },
+  repos: {
+    acceptInvitation: [
+      "PATCH /user/repository_invitations/{invitation_id}",
+      {},
+      { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] }
+    ],
+    acceptInvitationForAuthenticatedUser: [
+      "PATCH /user/repository_invitations/{invitation_id}"
+    ],
+    addAppAccessRestrictions: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+      {},
+      { mapToData: "apps" }
+    ],
+    addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"],
+    addStatusCheckContexts: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+      {},
+      { mapToData: "contexts" }
+    ],
+    addTeamAccessRestrictions: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+      {},
+      { mapToData: "teams" }
+    ],
+    addUserAccessRestrictions: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+      {},
+      { mapToData: "users" }
+    ],
+    cancelPagesDeployment: [
+      "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel"
+    ],
+    checkAutomatedSecurityFixes: [
+      "GET /repos/{owner}/{repo}/automated-security-fixes"
+    ],
+    checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
+    checkPrivateVulnerabilityReporting: [
+      "GET /repos/{owner}/{repo}/private-vulnerability-reporting"
+    ],
+    checkVulnerabilityAlerts: [
+      "GET /repos/{owner}/{repo}/vulnerability-alerts"
+    ],
+    codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
+    compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
+    compareCommitsWithBasehead: [
+      "GET /repos/{owner}/{repo}/compare/{basehead}"
+    ],
+    createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
+    createCommitComment: [
+      "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+    ],
+    createCommitSignatureProtection: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+    ],
+    createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"],
+    createDeployKey: ["POST /repos/{owner}/{repo}/keys"],
+    createDeployment: ["POST /repos/{owner}/{repo}/deployments"],
+    createDeploymentBranchPolicy: [
+      "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+    ],
+    createDeploymentProtectionRule: [
+      "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+    ],
+    createDeploymentStatus: [
+      "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+    ],
+    createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"],
+    createForAuthenticatedUser: ["POST /user/repos"],
+    createFork: ["POST /repos/{owner}/{repo}/forks"],
+    createInOrg: ["POST /orgs/{org}/repos"],
+    createOrUpdateCustomPropertiesValues: [
+      "PATCH /repos/{owner}/{repo}/properties/values"
+    ],
+    createOrUpdateEnvironment: [
+      "PUT /repos/{owner}/{repo}/environments/{environment_name}"
+    ],
+    createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
+    createOrgRuleset: ["POST /orgs/{org}/rulesets"],
+    createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"],
+    createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
+    createRelease: ["POST /repos/{owner}/{repo}/releases"],
+    createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"],
+    createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
+    createUsingTemplate: [
+      "POST /repos/{template_owner}/{template_repo}/generate"
+    ],
+    createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
+    declineInvitation: [
+      "DELETE /user/repository_invitations/{invitation_id}",
+      {},
+      { renamed: ["repos", "declineInvitationForAuthenticatedUser"] }
+    ],
+    declineInvitationForAuthenticatedUser: [
+      "DELETE /user/repository_invitations/{invitation_id}"
+    ],
+    delete: ["DELETE /repos/{owner}/{repo}"],
+    deleteAccessRestrictions: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+    ],
+    deleteAdminBranchProtection: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+    ],
+    deleteAnEnvironment: [
+      "DELETE /repos/{owner}/{repo}/environments/{environment_name}"
+    ],
+    deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+    deleteBranchProtection: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection"
+    ],
+    deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"],
+    deleteCommitSignatureProtection: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+    ],
+    deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"],
+    deleteDeployment: [
+      "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"
+    ],
+    deleteDeploymentBranchPolicy: [
+      "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+    ],
+    deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"],
+    deleteInvitation: [
+      "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"
+    ],
+    deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"],
+    deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"],
+    deletePullRequestReviewProtection: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+    ],
+    deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
+    deleteReleaseAsset: [
+      "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"
+    ],
+    deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+    deleteTagProtection: [
+      "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"
+    ],
+    deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
+    disableAutomatedSecurityFixes: [
+      "DELETE /repos/{owner}/{repo}/automated-security-fixes"
+    ],
+    disableDeploymentProtectionRule: [
+      "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+    ],
+    disablePrivateVulnerabilityReporting: [
+      "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting"
+    ],
+    disableVulnerabilityAlerts: [
+      "DELETE /repos/{owner}/{repo}/vulnerability-alerts"
+    ],
+    downloadArchive: [
+      "GET /repos/{owner}/{repo}/zipball/{ref}",
+      {},
+      { renamed: ["repos", "downloadZipballArchive"] }
+    ],
+    downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"],
+    downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"],
+    enableAutomatedSecurityFixes: [
+      "PUT /repos/{owner}/{repo}/automated-security-fixes"
+    ],
+    enablePrivateVulnerabilityReporting: [
+      "PUT /repos/{owner}/{repo}/private-vulnerability-reporting"
+    ],
+    enableVulnerabilityAlerts: [
+      "PUT /repos/{owner}/{repo}/vulnerability-alerts"
+    ],
+    generateReleaseNotes: [
+      "POST /repos/{owner}/{repo}/releases/generate-notes"
+    ],
+    get: ["GET /repos/{owner}/{repo}"],
+    getAccessRestrictions: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"
+    ],
+    getAdminBranchProtection: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+    ],
+    getAllDeploymentProtectionRules: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules"
+    ],
+    getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
+    getAllStatusCheckContexts: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"
+    ],
+    getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
+    getAppsWithAccessToProtectedBranch: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"
+    ],
+    getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
+    getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
+    getBranchProtection: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection"
+    ],
+    getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"],
+    getClones: ["GET /repos/{owner}/{repo}/traffic/clones"],
+    getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"],
+    getCollaboratorPermissionLevel: [
+      "GET /repos/{owner}/{repo}/collaborators/{username}/permission"
+    ],
+    getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"],
+    getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"],
+    getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"],
+    getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"],
+    getCommitSignatureProtection: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures"
+    ],
+    getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"],
+    getContent: ["GET /repos/{owner}/{repo}/contents/{path}"],
+    getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"],
+    getCustomDeploymentProtectionRule: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}"
+    ],
+    getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"],
+    getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"],
+    getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"],
+    getDeploymentBranchPolicy: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+    ],
+    getDeploymentStatus: [
+      "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"
+    ],
+    getEnvironment: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}"
+    ],
+    getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"],
+    getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"],
+    getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"],
+    getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"],
+    getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"],
+    getOrgRulesets: ["GET /orgs/{org}/rulesets"],
+    getPages: ["GET /repos/{owner}/{repo}/pages"],
+    getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"],
+    getPagesDeployment: [
+      "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}"
+    ],
+    getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"],
+    getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"],
+    getPullRequestReviewProtection: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+    ],
+    getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"],
+    getReadme: ["GET /repos/{owner}/{repo}/readme"],
+    getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"],
+    getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"],
+    getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"],
+    getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"],
+    getRepoRuleSuite: [
+      "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}"
+    ],
+    getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"],
+    getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+    getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"],
+    getStatusChecksProtection: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+    ],
+    getTeamsWithAccessToProtectedBranch: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"
+    ],
+    getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"],
+    getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"],
+    getUsersWithAccessToProtectedBranch: [
+      "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"
+    ],
+    getViews: ["GET /repos/{owner}/{repo}/traffic/views"],
+    getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"],
+    getWebhookConfigForRepo: [
+      "GET /repos/{owner}/{repo}/hooks/{hook_id}/config"
+    ],
+    getWebhookDelivery: [
+      "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}"
+    ],
+    listActivities: ["GET /repos/{owner}/{repo}/activity"],
+    listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"],
+    listBranches: ["GET /repos/{owner}/{repo}/branches"],
+    listBranchesForHeadCommit: [
+      "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head"
+    ],
+    listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"],
+    listCommentsForCommit: [
+      "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"
+    ],
+    listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"],
+    listCommitStatusesForRef: [
+      "GET /repos/{owner}/{repo}/commits/{ref}/statuses"
+    ],
+    listCommits: ["GET /repos/{owner}/{repo}/commits"],
+    listContributors: ["GET /repos/{owner}/{repo}/contributors"],
+    listCustomDeploymentRuleIntegrations: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps"
+    ],
+    listDeployKeys: ["GET /repos/{owner}/{repo}/keys"],
+    listDeploymentBranchPolicies: [
+      "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies"
+    ],
+    listDeploymentStatuses: [
+      "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"
+    ],
+    listDeployments: ["GET /repos/{owner}/{repo}/deployments"],
+    listForAuthenticatedUser: ["GET /user/repos"],
+    listForOrg: ["GET /orgs/{org}/repos"],
+    listForUser: ["GET /users/{username}/repos"],
+    listForks: ["GET /repos/{owner}/{repo}/forks"],
+    listInvitations: ["GET /repos/{owner}/{repo}/invitations"],
+    listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"],
+    listLanguages: ["GET /repos/{owner}/{repo}/languages"],
+    listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"],
+    listPublic: ["GET /repositories"],
+    listPullRequestsAssociatedWithCommit: [
+      "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"
+    ],
+    listReleaseAssets: [
+      "GET /repos/{owner}/{repo}/releases/{release_id}/assets"
+    ],
+    listReleases: ["GET /repos/{owner}/{repo}/releases"],
+    listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
+    listTags: ["GET /repos/{owner}/{repo}/tags"],
+    listTeams: ["GET /repos/{owner}/{repo}/teams"],
+    listWebhookDeliveries: [
+      "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"
+    ],
+    listWebhooks: ["GET /repos/{owner}/{repo}/hooks"],
+    merge: ["POST /repos/{owner}/{repo}/merges"],
+    mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"],
+    pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"],
+    redeliverWebhookDelivery: [
+      "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts"
+    ],
+    removeAppAccessRestrictions: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+      {},
+      { mapToData: "apps" }
+    ],
+    removeCollaborator: [
+      "DELETE /repos/{owner}/{repo}/collaborators/{username}"
+    ],
+    removeStatusCheckContexts: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+      {},
+      { mapToData: "contexts" }
+    ],
+    removeStatusCheckProtection: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+    ],
+    removeTeamAccessRestrictions: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+      {},
+      { mapToData: "teams" }
+    ],
+    removeUserAccessRestrictions: [
+      "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+      {},
+      { mapToData: "users" }
+    ],
+    renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
+    replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
+    requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
+    setAdminBranchProtection: [
+      "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"
+    ],
+    setAppAccessRestrictions: [
+      "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps",
+      {},
+      { mapToData: "apps" }
+    ],
+    setStatusCheckContexts: [
+      "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
+      {},
+      { mapToData: "contexts" }
+    ],
+    setTeamAccessRestrictions: [
+      "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
+      {},
+      { mapToData: "teams" }
+    ],
+    setUserAccessRestrictions: [
+      "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
+      {},
+      { mapToData: "users" }
+    ],
+    testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
+    transfer: ["POST /repos/{owner}/{repo}/transfer"],
+    update: ["PATCH /repos/{owner}/{repo}"],
+    updateBranchProtection: [
+      "PUT /repos/{owner}/{repo}/branches/{branch}/protection"
+    ],
+    updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
+    updateDeploymentBranchPolicy: [
+      "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}"
+    ],
+    updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
+    updateInvitation: [
+      "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"
+    ],
+    updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"],
+    updatePullRequestReviewProtection: [
+      "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"
+    ],
+    updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
+    updateReleaseAsset: [
+      "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"
+    ],
+    updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"],
+    updateStatusCheckPotection: [
+      "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
+      {},
+      { renamed: ["repos", "updateStatusCheckProtection"] }
+    ],
+    updateStatusCheckProtection: [
+      "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"
+    ],
+    updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
+    updateWebhookConfigForRepo: [
+      "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"
+    ],
+    uploadReleaseAsset: [
+      "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}",
+      { baseUrl: "https://uploads.github.com" }
+    ]
+  },
+  search: {
+    code: ["GET /search/code"],
+    commits: ["GET /search/commits"],
+    issuesAndPullRequests: ["GET /search/issues"],
+    labels: ["GET /search/labels"],
+    repos: ["GET /search/repositories"],
+    topics: ["GET /search/topics"],
+    users: ["GET /search/users"]
+  },
+  secretScanning: {
+    getAlert: [
+      "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+    ],
+    listAlertsForEnterprise: [
+      "GET /enterprises/{enterprise}/secret-scanning/alerts"
+    ],
+    listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
+    listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
+    listLocationsForAlert: [
+      "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"
+    ],
+    updateAlert: [
+      "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"
+    ]
+  },
+  securityAdvisories: {
+    createFork: [
+      "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks"
+    ],
+    createPrivateVulnerabilityReport: [
+      "POST /repos/{owner}/{repo}/security-advisories/reports"
+    ],
+    createRepositoryAdvisory: [
+      "POST /repos/{owner}/{repo}/security-advisories"
+    ],
+    createRepositoryAdvisoryCveRequest: [
+      "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve"
+    ],
+    getGlobalAdvisory: ["GET /advisories/{ghsa_id}"],
+    getRepositoryAdvisory: [
+      "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+    ],
+    listGlobalAdvisories: ["GET /advisories"],
+    listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"],
+    listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"],
+    updateRepositoryAdvisory: [
+      "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}"
+    ]
+  },
+  teams: {
+    addOrUpdateMembershipForUserInOrg: [
+      "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"
+    ],
+    addOrUpdateProjectPermissionsInOrg: [
+      "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+    ],
+    addOrUpdateRepoPermissionsInOrg: [
+      "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+    ],
+    checkPermissionsForProjectInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+    ],
+    checkPermissionsForRepoInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+    ],
+    create: ["POST /orgs/{org}/teams"],
+    createDiscussionCommentInOrg: [
+      "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+    ],
+    createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
+    deleteDiscussionCommentInOrg: [
+      "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+    ],
+    deleteDiscussionInOrg: [
+      "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+    ],
+    deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
+    getByName: ["GET /orgs/{org}/teams/{team_slug}"],
+    getDiscussionCommentInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+    ],
+    getDiscussionInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+    ],
+    getMembershipForUserInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/memberships/{username}"
+    ],
+    list: ["GET /orgs/{org}/teams"],
+    listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
+    listDiscussionCommentsInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"
+    ],
+    listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
+    listForAuthenticatedUser: ["GET /user/teams"],
+    listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
+    listPendingInvitationsInOrg: [
+      "GET /orgs/{org}/teams/{team_slug}/invitations"
+    ],
+    listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
+    listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
+    removeMembershipForUserInOrg: [
+      "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"
+    ],
+    removeProjectInOrg: [
+      "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"
+    ],
+    removeRepoInOrg: [
+      "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"
+    ],
+    updateDiscussionCommentInOrg: [
+      "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"
+    ],
+    updateDiscussionInOrg: [
+      "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"
+    ],
+    updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
+  },
+  users: {
+    addEmailForAuthenticated: [
+      "POST /user/emails",
+      {},
+      { renamed: ["users", "addEmailForAuthenticatedUser"] }
+    ],
+    addEmailForAuthenticatedUser: ["POST /user/emails"],
+    addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"],
+    block: ["PUT /user/blocks/{username}"],
+    checkBlocked: ["GET /user/blocks/{username}"],
+    checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
+    checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
+    createGpgKeyForAuthenticated: [
+      "POST /user/gpg_keys",
+      {},
+      { renamed: ["users", "createGpgKeyForAuthenticatedUser"] }
+    ],
+    createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
+    createPublicSshKeyForAuthenticated: [
+      "POST /user/keys",
+      {},
+      { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] }
+    ],
+    createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
+    createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"],
+    deleteEmailForAuthenticated: [
+      "DELETE /user/emails",
+      {},
+      { renamed: ["users", "deleteEmailForAuthenticatedUser"] }
+    ],
+    deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
+    deleteGpgKeyForAuthenticated: [
+      "DELETE /user/gpg_keys/{gpg_key_id}",
+      {},
+      { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] }
+    ],
+    deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
+    deletePublicSshKeyForAuthenticated: [
+      "DELETE /user/keys/{key_id}",
+      {},
+      { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] }
+    ],
+    deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
+    deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"],
+    deleteSshSigningKeyForAuthenticatedUser: [
+      "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}"
+    ],
+    follow: ["PUT /user/following/{username}"],
+    getAuthenticated: ["GET /user"],
+    getByUsername: ["GET /users/{username}"],
+    getContextForUser: ["GET /users/{username}/hovercard"],
+    getGpgKeyForAuthenticated: [
+      "GET /user/gpg_keys/{gpg_key_id}",
+      {},
+      { renamed: ["users", "getGpgKeyForAuthenticatedUser"] }
+    ],
+    getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
+    getPublicSshKeyForAuthenticated: [
+      "GET /user/keys/{key_id}",
+      {},
+      { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] }
+    ],
+    getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
+    getSshSigningKeyForAuthenticatedUser: [
+      "GET /user/ssh_signing_keys/{ssh_signing_key_id}"
+    ],
+    list: ["GET /users"],
+    listBlockedByAuthenticated: [
+      "GET /user/blocks",
+      {},
+      { renamed: ["users", "listBlockedByAuthenticatedUser"] }
+    ],
+    listBlockedByAuthenticatedUser: ["GET /user/blocks"],
+    listEmailsForAuthenticated: [
+      "GET /user/emails",
+      {},
+      { renamed: ["users", "listEmailsForAuthenticatedUser"] }
+    ],
+    listEmailsForAuthenticatedUser: ["GET /user/emails"],
+    listFollowedByAuthenticated: [
+      "GET /user/following",
+      {},
+      { renamed: ["users", "listFollowedByAuthenticatedUser"] }
+    ],
+    listFollowedByAuthenticatedUser: ["GET /user/following"],
+    listFollowersForAuthenticatedUser: ["GET /user/followers"],
+    listFollowersForUser: ["GET /users/{username}/followers"],
+    listFollowingForUser: ["GET /users/{username}/following"],
+    listGpgKeysForAuthenticated: [
+      "GET /user/gpg_keys",
+      {},
+      { renamed: ["users", "listGpgKeysForAuthenticatedUser"] }
+    ],
+    listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
+    listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
+    listPublicEmailsForAuthenticated: [
+      "GET /user/public_emails",
+      {},
+      { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] }
+    ],
+    listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
+    listPublicKeysForUser: ["GET /users/{username}/keys"],
+    listPublicSshKeysForAuthenticated: [
+      "GET /user/keys",
+      {},
+      { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] }
+    ],
+    listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
+    listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"],
+    listSocialAccountsForUser: ["GET /users/{username}/social_accounts"],
+    listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"],
+    listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"],
+    setPrimaryEmailVisibilityForAuthenticated: [
+      "PATCH /user/email/visibility",
+      {},
+      { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] }
+    ],
+    setPrimaryEmailVisibilityForAuthenticatedUser: [
+      "PATCH /user/email/visibility"
+    ],
+    unblock: ["DELETE /user/blocks/{username}"],
+    unfollow: ["DELETE /user/following/{username}"],
+    updateAuthenticated: ["PATCH /user"]
+  }
 };
-
-// src/dismiss-reviews.ts
-var requestReviewsMutation = (
-  /* GraphQL */
-  `
-  mutation dismissReview($message: String!, $pullRequestReviewId: ID!) {
-    dismissPullRequestReview(
-      input: { message: $message, pullRequestReviewId: $pullRequestReviewId }
-    ) {
-      clientMutationId
+var endpoints_default2 = Endpoints2;
+
+// node_modules/@octokit/action/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/endpoints-to-methods.js
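+// Descriptive note: the route table above maps scope.methodName to
+// [route, defaults?, decorations?]; decorations may carry `renamed`,
+// `renamedParameters`, `deprecated`, or `mapToData`. The loop below flattens
+// it into a Map of scope -> Map(methodName -> { endpointDefaults, decorations }).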
+var endpointMethodsMap2 = /* @__PURE__ */ new Map();
+for (const [scope, endpoints] of Object.entries(endpoints_default2)) {
+  for (const [methodName, endpoint3] of Object.entries(endpoints)) {
+    const [route, defaults2, decorations] = endpoint3;
+    const [method, url] = route.split(/ /);
+    const endpointDefaults = Object.assign(
+      {
+        method,
+        url
+      },
+      defaults2
+    );
+    if (!endpointMethodsMap2.has(scope)) {
+      endpointMethodsMap2.set(scope, /* @__PURE__ */ new Map());
+    }
+    endpointMethodsMap2.get(scope).set(methodName, {
+      scope,
+      methodName,
+      endpointDefaults,
+      decorations
+    });
+  }
+}
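+// Proxy handler: endpoint methods are created lazily on first access and cached
+// per scope; methods that carry decorations are wrapped via decorate2 below,
+// all others are plain octokit.request.defaults(endpointDefaults) instances.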
+var handler2 = {
+  has({ scope }, methodName) {
+    return endpointMethodsMap2.get(scope).has(methodName);
+  },
+  getOwnPropertyDescriptor(target, methodName) {
+    return {
+      value: this.get(target, methodName),
+      // ensures method is in the cache
+      configurable: true,
+      writable: true,
+      enumerable: true
+    };
+  },
+  defineProperty(target, methodName, descriptor) {
+    Object.defineProperty(target.cache, methodName, descriptor);
+    return true;
+  },
+  deleteProperty(target, methodName) {
+    delete target.cache[methodName];
+    return true;
+  },
+  ownKeys({ scope }) {
+    return [...endpointMethodsMap2.get(scope).keys()];
+  },
+  set(target, methodName, value) {
+    return target.cache[methodName] = value;
+  },
+  get({ octokit, scope, cache: cache2 }, methodName) {
+    if (cache2[methodName]) {
+      return cache2[methodName];
+    }
+    const method = endpointMethodsMap2.get(scope).get(methodName);
+    if (!method) {
+      return void 0;
     }
+    const { endpointDefaults, decorations } = method;
+    if (decorations) {
+      cache2[methodName] = decorate2(
+        octokit,
+        scope,
+        methodName,
+        endpointDefaults,
+        decorations
+      );
+    } else {
+      cache2[methodName] = octokit.request.defaults(endpointDefaults);
+    }
+    return cache2[methodName];
   }
-`
-);
-var dismissReviews = async ({
-  octokit,
-  message,
-  reviewsToDismiss
-}) => Promise.all(
-  reviewsToDismiss.map(async ({ id: pullRequestReviewId, author }) => {
-    try {
-      await octokit.graphql(requestReviewsMutation, {
-        message,
-        pullRequestReviewId
+};
+function endpointsToMethods2(octokit) {
+  const newMethods = {};
+  for (const scope of endpointMethodsMap2.keys()) {
+    newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler2);
+  }
+  return newMethods;
+}
+function decorate2(octokit, scope, methodName, defaults2, decorations) {
+  const requestWithDefaults = octokit.request.defaults(defaults2);
+  function withDecorations(...args) {
+    let options = requestWithDefaults.endpoint.merge(...args);
+    if (decorations.mapToData) {
+      options = Object.assign({}, options, {
+        data: options[decorations.mapToData],
+        [decorations.mapToData]: void 0
       });
-    } catch {
-      console.error(`Failed to dismiss review from ${author?.login}.`);
+      return requestWithDefaults(options);
     }
-  })
-);
-
-// src/get-pr-data.ts
-var getPullRequestQuery2 = (
-  /* GraphQL */
-  `
-  query getPrData($nodeId: ID!, $cursor: String) {
-    node(id: $nodeId) {
-      __typename
-      ... on PullRequest {
-        commits(last: 1) {
-          nodes {
-            commit {
-              oid
-              committedDate
-            }
-          }
-        }
-        latestOpinionatedReviews(first: 100, after: $cursor) {
-          nodes {
-            id
-            state
-            commit {
-              oid
-            }
-            author {
-              __typename
-              login
-              ... on User {
-                id
-              }
-            }
-          }
-          pageInfo {
-            hasNextPage
-            endCursor
+    if (decorations.renamed) {
+      const [newScope, newMethodName] = decorations.renamed;
+      octokit.log.warn(
+        `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`
+      );
+    }
+    if (decorations.deprecated) {
+      octokit.log.warn(decorations.deprecated);
+    }
+    if (decorations.renamedParameters) {
+      const options2 = requestWithDefaults.endpoint.merge(...args);
+      for (const [name, alias] of Object.entries(
+        decorations.renamedParameters
+      )) {
+        if (name in options2) {
+          octokit.log.warn(
+            `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`
+          );
+          if (!(alias in options2)) {
+            options2[alias] = options2[name];
           }
+          delete options2[name];
         }
       }
+      return requestWithDefaults(options2);
     }
+    return requestWithDefaults(...args);
   }
-`
-);
-var getPrData = async ({
-  octokit,
-  pullRequestId
-}) => {
-  const { node: pullRequest } = await octokit.graphql.paginate(
-    getPullRequestQuery2,
-    {
-      nodeId: pullRequestId
-    }
-  );
-  if (!pullRequest || pullRequest.__typename !== "PullRequest") {
-    throw new Error("The pull request could not be found!");
-  }
-  if (!pullRequest.commits.nodes) {
-    throw new Error("Pull request commits are missing!");
-  }
+  return Object.assign(withDecorations, requestWithDefaults);
+}
+
+// node_modules/@octokit/action/node_modules/@octokit/plugin-rest-endpoint-methods/dist-src/index.js
+function restEndpointMethods2(octokit) {
+  const api = endpointsToMethods2(octokit);
   return {
-    commits: pullRequest.commits.nodes.filter(isPresent),
-    latestReviews: pullRequest.latestOpinionatedReviews?.nodes?.filter(isPresent) ?? []
+    rest: api
   };
-};
-
-// node_modules/@octokit/action/dist-src/index.js
-init_dist_web4();
-var import_auth_action = __toESM(require_dist_node());
-init_dist_web5();
-init_dist_src5();
-
-// node_modules/@octokit/action/dist-src/version.js
-var VERSION7 = "6.0.7";
+}
+restEndpointMethods2.VERSION = VERSION12;
+function legacyRestEndpointMethods2(octokit) {
+  const api = endpointsToMethods2(octokit);
+  return {
+    ...api,
+    rest: api
+  };
+}
+legacyRestEndpointMethods2.VERSION = VERSION12;
 
-// node_modules/@octokit/action/dist-src/index.js
-var import_undici = __toESM(require_undici2());
-var DEFAULTS2 = {
-  authStrategy: import_auth_action.createActionAuth,
+// node_modules/@octokit/action/dist-bundle/index.js
+var import_undici = __toESM(require_undici2(), 1);
+var VERSION13 = "0.0.0-development";
+var DEFAULTS3 = {
+  authStrategy: createActionAuth,
   baseUrl: getApiBaseUrl(),
-  userAgent: `octokit-action.js/${VERSION7}`
+  userAgent: `octokit-action.js/${VERSION13}`
 };
 function getProxyAgent() {
   const httpProxy = process.env["HTTP_PROXY"] || process.env["http_proxy"];
@@ -55791,24 +60297,23 @@ var customFetch = async function(url, opts) {
     ...opts
   });
 };
-var Octokit2 = Octokit.plugin(
-  paginateRest,
-  legacyRestEndpointMethods
-).defaults(function buildDefaults(options) {
-  return {
-    ...DEFAULTS2,
-    ...options,
-    request: {
-      fetch: customFetch,
-      ...options.request
-    }
-  };
-});
+var Octokit3 = Octokit2.plugin(paginateRest2, legacyRestEndpointMethods2).defaults(
+  function buildDefaults(options) {
+    return {
+      ...DEFAULTS3,
+      ...options,
+      request: {
+        fetch: customFetch,
+        ...options.request
+      }
+    };
+  }
+);
 function getApiBaseUrl() {
   return process.env["GITHUB_API_URL"] || "https://api.github.com";
 }
 
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/errors.js
+// node_modules/@octokit/plugin-paginate-graphql/dist-bundle/index.js
 var generateMessage = (path2, cursorValue) => `The cursor at "${path2.join(
   ","
 )}" did not change its value "${cursorValue}" after a page transition. Please make sure your that your query is set up correctly.`;
@@ -55817,11 +60322,11 @@ var MissingCursorChange = class extends Error {
     super(generateMessage(pageInfo.pathInQuery, cursorValue));
     this.pageInfo = pageInfo;
     this.cursorValue = cursorValue;
-    this.name = "MissingCursorChangeError";
     if (Error.captureStackTrace) {
       Error.captureStackTrace(this, this.constructor);
     }
   }
+  name = "MissingCursorChangeError";
 };
 var MissingPageInfo = class extends Error {
   constructor(response) {
@@ -55833,14 +60338,12 @@ var MissingPageInfo = class extends Error {
       )}`
     );
     this.response = response;
-    this.name = "MissingPageInfo";
     if (Error.captureStackTrace) {
       Error.captureStackTrace(this, this.constructor);
     }
   }
+  name = "MissingPageInfo";
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/object-helpers.js
 var isObject = (value) => Object.prototype.toString.call(value) === "[object Object]";
 function findPaginatedResourcePath(responseData) {
   const paginatedResourcePath = deepFindPathToProperty(
@@ -55856,10 +60359,10 @@ var deepFindPathToProperty = (object, searchProp, path2 = []) => {
   for (const key of Object.keys(object)) {
     const currentPath = [...path2, key];
     const currentValue = object[key];
-    if (currentValue.hasOwnProperty(searchProp)) {
-      return currentPath;
-    }
     if (isObject(currentValue)) {
+      if (currentValue.hasOwnProperty(searchProp)) {
+        return currentPath;
+      }
       const result = deepFindPathToProperty(
         currentValue,
         searchProp,
@@ -55885,8 +60388,6 @@ var set = (object, path2, mutator) => {
     parent[lastProperty] = mutator;
   }
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/extract-page-info.js
 var extractPageInfos = (responseData) => {
   const pageInfoPath = findPaginatedResourcePath(responseData);
   return {
@@ -55894,15 +60395,11 @@ var extractPageInfos = (responseData) => {
     pageInfo: get(responseData, [...pageInfoPath, "pageInfo"])
   };
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/page-info.js
 var isForwardSearch = (givenPageInfo) => {
   return givenPageInfo.hasOwnProperty("hasNextPage");
 };
 var getCursorFrom = (pageInfo) => isForwardSearch(pageInfo) ? pageInfo.endCursor : pageInfo.startCursor;
 var hasAnotherPage = (pageInfo) => isForwardSearch(pageInfo) ? pageInfo.hasNextPage : pageInfo.hasPreviousPage;
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/iterator.js
 var createIterator = (octokit) => {
   return (query, initialParameters = {}) => {
     let nextPageExists = true;
@@ -55910,8 +60407,7 @@ var createIterator = (octokit) => {
     return {
       [Symbol.asyncIterator]: () => ({
         async next() {
-          if (!nextPageExists)
-            return { done: true, value: {} };
+          if (!nextPageExists) return { done: true, value: {} };
           const response = await octokit.graphql(
             query,
             parameters
@@ -55932,8 +60428,6 @@ var createIterator = (octokit) => {
     };
   };
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/merge-responses.js
 var mergeResponses = (response1, response2) => {
   if (Object.keys(response1).length === 0) {
     return Object.assign(response1, response2);
@@ -55957,13 +60451,11 @@ var mergeResponses = (response1, response2) => {
   set(response1, pageInfoPath, get(response2, pageInfoPath));
   return response1;
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/paginate.js
 var createPaginate = (octokit) => {
-  const iterator2 = createIterator(octokit);
+  const iterator3 = createIterator(octokit);
   return async (query, initialParameters = {}) => {
     let mergedResponse = {};
-    for await (const response of iterator2(
+    for await (const response of iterator3(
       query,
       initialParameters
     )) {
@@ -55972,10 +60464,7 @@ var createPaginate = (octokit) => {
     return mergedResponse;
   };
 };
-
-// node_modules/@octokit/plugin-paginate-graphql/dist-src/index.js
-function paginateGraphql(octokit) {
-  octokit.graphql;
+function paginateGraphQL(octokit) {
   return {
     graphql: Object.assign(octokit.graphql, {
       paginate: Object.assign(createPaginate(octokit), {
@@ -56285,7 +60774,7 @@ var getNonSpecFormDataBoundary = (0, import_node_util.deprecate)(
   "form-data doesn't follow the spec and requires special treatment. Use alternative package",
   "https://github.com/node-fetch/node-fetch/issues/1167"
 );
-var extractContentType = (body, request2) => {
+var extractContentType = (body, request3) => {
   if (body === null) {
     return null;
   }
@@ -56302,7 +60791,7 @@ var extractContentType = (body, request2) => {
     return null;
   }
   if (body instanceof FormData) {
-    return `multipart/form-data; boundary=${request2[INTERNALS].boundary}`;
+    return `multipart/form-data; boundary=${request3[INTERNALS].boundary}`;
   }
   if (body && typeof body.getBoundary === "function") {
     return `multipart/form-data;boundary=${getNonSpecFormDataBoundary(body)}`;
@@ -56312,8 +60801,8 @@ var extractContentType = (body, request2) => {
   }
   return "text/plain;charset=UTF-8";
 };
-var getTotalBytes = (request2) => {
-  const { body } = request2[INTERNALS];
+var getTotalBytes = (request3) => {
+  const { body } = request3[INTERNALS];
   if (body === null) {
     return 0;
   }
@@ -56729,15 +61218,15 @@ function isUrlPotentiallyTrustworthy(url) {
   }
   return isOriginPotentiallyTrustworthy(url);
 }
-function determineRequestsReferrer(request2, { referrerURLCallback, referrerOriginCallback } = {}) {
-  if (request2.referrer === "no-referrer" || request2.referrerPolicy === "") {
+function determineRequestsReferrer(request3, { referrerURLCallback, referrerOriginCallback } = {}) {
+  if (request3.referrer === "no-referrer" || request3.referrerPolicy === "") {
     return null;
   }
-  const policy = request2.referrerPolicy;
-  if (request2.referrer === "about:client") {
+  const policy = request3.referrerPolicy;
+  if (request3.referrer === "about:client") {
     return "no-referrer";
   }
-  const referrerSource = request2.referrer;
+  const referrerSource = request3.referrer;
   let referrerURL = stripURLForUseAsAReferrer(referrerSource);
   let referrerOrigin = stripURLForUseAsAReferrer(referrerSource, true);
   if (referrerURL.toString().length > 4096) {
@@ -56749,7 +61238,7 @@ function determineRequestsReferrer(request2, { referrerURLCallback, referrerOrig
   if (referrerOriginCallback) {
     referrerOrigin = referrerOriginCallback(referrerOrigin);
   }
-  const currentURL = new URL(request2.url);
+  const currentURL = new URL(request3.url);
   switch (policy) {
     case "no-referrer":
       return "no-referrer";
@@ -56936,18 +61425,18 @@ Object.defineProperties(Request.prototype, {
   referrer: { enumerable: true },
   referrerPolicy: { enumerable: true }
 });
-var getNodeRequestOptions = (request2) => {
-  const { parsedURL } = request2[INTERNALS3];
-  const headers = new Headers(request2[INTERNALS3].headers);
+var getNodeRequestOptions = (request3) => {
+  const { parsedURL } = request3[INTERNALS3];
+  const headers = new Headers(request3[INTERNALS3].headers);
   if (!headers.has("Accept")) {
     headers.set("Accept", "*/*");
   }
   let contentLengthValue = null;
-  if (request2.body === null && /^(post|put)$/i.test(request2.method)) {
+  if (request3.body === null && /^(post|put)$/i.test(request3.method)) {
     contentLengthValue = "0";
   }
-  if (request2.body !== null) {
-    const totalBytes = getTotalBytes(request2);
+  if (request3.body !== null) {
+    const totalBytes = getTotalBytes(request3);
     if (typeof totalBytes === "number" && !Number.isNaN(totalBytes)) {
       contentLengthValue = String(totalBytes);
     }
@@ -56955,24 +61444,24 @@ var getNodeRequestOptions = (request2) => {
   if (contentLengthValue) {
     headers.set("Content-Length", contentLengthValue);
   }
-  if (request2.referrerPolicy === "") {
-    request2.referrerPolicy = DEFAULT_REFERRER_POLICY;
+  if (request3.referrerPolicy === "") {
+    request3.referrerPolicy = DEFAULT_REFERRER_POLICY;
   }
-  if (request2.referrer && request2.referrer !== "no-referrer") {
-    request2[INTERNALS3].referrer = determineRequestsReferrer(request2);
+  if (request3.referrer && request3.referrer !== "no-referrer") {
+    request3[INTERNALS3].referrer = determineRequestsReferrer(request3);
   } else {
-    request2[INTERNALS3].referrer = "no-referrer";
+    request3[INTERNALS3].referrer = "no-referrer";
   }
-  if (request2[INTERNALS3].referrer instanceof URL) {
-    headers.set("Referer", request2.referrer);
+  if (request3[INTERNALS3].referrer instanceof URL) {
+    headers.set("Referer", request3.referrer);
   }
   if (!headers.has("User-Agent")) {
     headers.set("User-Agent", "node-fetch");
   }
-  if (request2.compress && !headers.has("Accept-Encoding")) {
+  if (request3.compress && !headers.has("Accept-Encoding")) {
     headers.set("Accept-Encoding", "gzip, deflate, br");
   }
-  let { agent } = request2;
+  let { agent } = request3;
   if (typeof agent === "function") {
     agent = agent(parsedURL);
   }
@@ -56981,9 +61470,9 @@ var getNodeRequestOptions = (request2) => {
     // Overwrite search to retain trailing ? (issue #776)
     path: parsedURL.pathname + search,
     // The following options are not expressed in the URL
-    method: request2.method,
+    method: request3.method,
     headers: headers[Symbol.for("nodejs.util.inspect.custom")](),
-    insecureHTTPParser: request2.insecureHTTPParser,
+    insecureHTTPParser: request3.insecureHTTPParser,
     agent
   };
   return {
@@ -57006,25 +61495,25 @@ init_from();
 var supportedSchemas = /* @__PURE__ */ new Set(["data:", "http:", "https:"]);
 async function fetch(url, options_) {
   return new Promise((resolve, reject) => {
-    const request2 = new Request(url, options_);
-    const { parsedURL, options } = getNodeRequestOptions(request2);
+    const request3 = new Request(url, options_);
+    const { parsedURL, options } = getNodeRequestOptions(request3);
     if (!supportedSchemas.has(parsedURL.protocol)) {
       throw new TypeError(`node-fetch cannot load ${url}. URL scheme "${parsedURL.protocol.replace(/:$/, "")}" is not supported.`);
     }
     if (parsedURL.protocol === "data:") {
-      const data = dist_default(request2.url);
+      const data = dist_default(request3.url);
       const response2 = new Response(data, { headers: { "Content-Type": data.typeFull } });
       resolve(response2);
       return;
     }
     const send = (parsedURL.protocol === "https:" ? import_node_https.default : import_node_http2.default).request;
-    const { signal } = request2;
+    const { signal } = request3;
     let response = null;
     const abort = () => {
       const error = new AbortError("The operation was aborted.");
       reject(error);
-      if (request2.body && request2.body instanceof import_node_stream2.default.Readable) {
-        request2.body.destroy(error);
+      if (request3.body && request3.body instanceof import_node_stream2.default.Readable) {
+        request3.body.destroy(error);
       }
       if (!response || !response.body) {
         return;
@@ -57050,7 +61539,7 @@ async function fetch(url, options_) {
       }
     };
     request_.on("error", (error) => {
-      reject(new FetchError(`request to ${request2.url} failed, reason: ${error.message}`, "system", error));
+      reject(new FetchError(`request to ${request3.url} failed, reason: ${error.message}`, "system", error));
       finalize();
     });
     fixResponseChunkedTransferBadEnding(request_, (error) => {
@@ -57080,17 +61569,17 @@ async function fetch(url, options_) {
         const location = headers.get("Location");
         let locationURL = null;
         try {
-          locationURL = location === null ? null : new URL(location, request2.url);
+          locationURL = location === null ? null : new URL(location, request3.url);
         } catch {
-          if (request2.redirect !== "manual") {
+          if (request3.redirect !== "manual") {
             reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, "invalid-redirect"));
             finalize();
             return;
           }
         }
-        switch (request2.redirect) {
+        switch (request3.redirect) {
           case "error":
-            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request2.url}`, "no-redirect"));
+            reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request3.url}`, "no-redirect"));
             finalize();
             return;
           case "manual":
@@ -57099,35 +61588,35 @@ async function fetch(url, options_) {
             if (locationURL === null) {
               break;
             }
-            if (request2.counter >= request2.follow) {
-              reject(new FetchError(`maximum redirect reached at: ${request2.url}`, "max-redirect"));
+            if (request3.counter >= request3.follow) {
+              reject(new FetchError(`maximum redirect reached at: ${request3.url}`, "max-redirect"));
               finalize();
               return;
             }
             const requestOptions = {
-              headers: new Headers(request2.headers),
-              follow: request2.follow,
-              counter: request2.counter + 1,
-              agent: request2.agent,
-              compress: request2.compress,
-              method: request2.method,
-              body: clone(request2),
-              signal: request2.signal,
-              size: request2.size,
-              referrer: request2.referrer,
-              referrerPolicy: request2.referrerPolicy
+              headers: new Headers(request3.headers),
+              follow: request3.follow,
+              counter: request3.counter + 1,
+              agent: request3.agent,
+              compress: request3.compress,
+              method: request3.method,
+              body: clone(request3),
+              signal: request3.signal,
+              size: request3.size,
+              referrer: request3.referrer,
+              referrerPolicy: request3.referrerPolicy
             };
-            if (!isDomainOrSubdomain(request2.url, locationURL) || !isSameProtocol(request2.url, locationURL)) {
+            if (!isDomainOrSubdomain(request3.url, locationURL) || !isSameProtocol(request3.url, locationURL)) {
               for (const name of ["authorization", "www-authenticate", "cookie", "cookie2"]) {
                 requestOptions.headers.delete(name);
               }
             }
-            if (response_.statusCode !== 303 && request2.body && options_.body instanceof import_node_stream2.default.Readable) {
+            if (response_.statusCode !== 303 && request3.body && options_.body instanceof import_node_stream2.default.Readable) {
               reject(new FetchError("Cannot follow redirect with body being a readable stream", "unsupported-redirect"));
               finalize();
               return;
             }
-            if (response_.statusCode === 303 || (response_.statusCode === 301 || response_.statusCode === 302) && request2.method === "POST") {
+            if (response_.statusCode === 303 || (response_.statusCode === 301 || response_.statusCode === 302) && request3.method === "POST") {
               requestOptions.method = "GET";
               requestOptions.body = void 0;
               requestOptions.headers.delete("content-length");
@@ -57141,7 +61630,7 @@ async function fetch(url, options_) {
             return;
           }
           default:
-            return reject(new TypeError(`Redirect option '${request2.redirect}' is not a valid value of RequestRedirect`));
+            return reject(new TypeError(`Redirect option '${request3.redirect}' is not a valid value of RequestRedirect`));
         }
       }
       if (signal) {
@@ -57158,16 +61647,16 @@ async function fetch(url, options_) {
         response_.on("aborted", abortAndFinalize);
       }
       const responseOptions = {
-        url: request2.url,
+        url: request3.url,
         status: response_.statusCode,
         statusText: response_.statusMessage,
         headers,
-        size: request2.size,
-        counter: request2.counter,
-        highWaterMark: request2.highWaterMark
+        size: request3.size,
+        counter: request3.counter,
+        highWaterMark: request3.highWaterMark
       };
       const codings = headers.get("Content-Encoding");
-      if (!request2.compress || request2.method === "HEAD" || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {
+      if (!request3.compress || request3.method === "HEAD" || codings === null || response_.statusCode === 204 || response_.statusCode === 304) {
         response = new Response(body, responseOptions);
         resolve(response);
         return;
@@ -57230,19 +61719,19 @@ async function fetch(url, options_) {
       response = new Response(body, responseOptions);
       resolve(response);
     });
-    writeToStream(request_, request2).catch(reject);
+    writeToStream(request_, request3).catch(reject);
   });
 }
-function fixResponseChunkedTransferBadEnding(request2, errorCallback) {
+function fixResponseChunkedTransferBadEnding(request3, errorCallback) {
   const LAST_CHUNK = import_node_buffer2.Buffer.from("0\r\n\r\n");
   let isChunkedTransfer = false;
   let properLastChunkReceived = false;
   let previousChunk;
-  request2.on("response", (response) => {
+  request3.on("response", (response) => {
     const { headers } = response;
     isChunkedTransfer = headers["transfer-encoding"] === "chunked" && !headers["content-length"];
   });
-  request2.on("socket", (socket) => {
+  request3.on("socket", (socket) => {
     const onSocketClose = () => {
       if (isChunkedTransfer && !properLastChunkReceived) {
         const error = new Error("Premature close");
@@ -57259,7 +61748,7 @@ function fixResponseChunkedTransferBadEnding(request2, errorCallback) {
     };
     socket.prependListener("close", onSocketClose);
     socket.on("data", onData);
-    request2.on("close", () => {
+    request3.on("close", () => {
       socket.removeListener("close", onSocketClose);
       socket.removeListener("data", onData);
     });
@@ -57268,7 +61757,7 @@ function fixResponseChunkedTransferBadEnding(request2, errorCallback) {
 
 // src/get-octokit.ts
 var getOctokit = ({ ghToken }) => {
-  const OctokitWithPlugins = Octokit2.plugin(paginateGraphql);
+  const OctokitWithPlugins = Octokit3.plugin(paginateGraphQL);
   return new OctokitWithPlugins({ auth: ghToken, request: { fetch } });
 };
 
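Aside (illustrative, not part of the diff): the rebundled @octokit/plugin-rest-endpoint-methods code earlier in this file diff binds octokit.rest.<scope>.<method> lazily, using one Proxy per scope over a route map so a method is only turned into octokit.request.defaults(...) the first time it is read. A minimal TypeScript sketch of that lazy-binding pattern, with purely illustrative names and routes:

// Sketch of the Proxy-backed lazy method cache used by the rebundled
// plugin-rest-endpoint-methods code above; names and routes are illustrative.
type Route = { method: string; url: string }

const routesByScope = new Map<string, Map<string, Route>>([
  ['users', new Map([['getAuthenticated', { method: 'GET', url: '/user' }]])],
])

const makeScope = (
  scope: string,
  request: (route: Route) => Promise<unknown>,
) =>
  new Proxy<Record<string, () => Promise<unknown>>>(
    {},
    {
      get(cache, methodName) {
        if (typeof methodName !== 'string') return undefined
        if (!(methodName in cache)) {
          const route = routesByScope.get(scope)?.get(methodName)
          if (!route) return undefined
          // Memoize the bound method on first access only.
          cache[methodName] = () => request(route)
        }
        return cache[methodName]
      },
    },
  )

// Nothing is built until `users.getAuthenticated` is first read.
const users = makeScope('users', async route => console.log(route))
void users.getAuthenticated?.()
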
diff --git a/eslint.config.js b/eslint.config.js
index 0ba2f27..f3bad51 100644
--- a/eslint.config.js
+++ b/eslint.config.js
@@ -1,19 +1,7 @@
 import tsEslint from 'typescript-eslint'
-import path from 'node:path'
-import { fileURLToPath } from 'node:url'
 import stylistic from '@stylistic/eslint-plugin'
 import js from '@eslint/js'
 
-import { FlatCompat } from '@eslint/eslintrc'
-
-const __filename = fileURLToPath(import.meta.url)
-const __dirname = path.dirname(__filename)
-
-const compat = new FlatCompat({
-  baseDirectory: __dirname,
-  recommendedConfig: js.configs.recommended,
-})
-
 const recommendedConfig = [
   {
     files: ['**/*.ts', '**/*.tsx'],
@@ -21,7 +9,7 @@ const recommendedConfig = [
     languageOptions: {
       ...tsEslint.configs.base.languageOptions,
       parserOptions: {
-        EXPERIMENTAL_useProjectService: true,
+        projectService: true,
         sourceType: 'module',
       },
     },
@@ -49,13 +37,6 @@ export default [
   ...recommendedConfig,
   stylistic.configs['recommended-flat'],
   js.configs.recommended,
-  ...compat
-    .config({
-      extends: ['prettier'],
-    })
-    .map(config => ({
-      ...config,
-    })),
   {
     files: ['**/*.ts', '**/*.tsx', '**/*.js', '**/*.jsx'],
     rules: {
diff --git a/package.json b/package.json
index 53f2c73..e86156c 100644
--- a/package.json
+++ b/package.json
@@ -10,7 +10,7 @@
     "package": "tsup src/main.ts --clean",
     "test": "jest --passWithNoTests",
     "generate": "graphql-codegen-esm --config codegen.ts",
-    "all": "bun run type-check && bun run format-check && bun run lint && bun run package && bun run test",
+    "all": "bun run type-check && bun run format-check && bun run lint && bun run package",
     "semantic-release": "semantic-release",
     "prepare": "husky"
   },
@@ -29,45 +29,44 @@
   "author": "Balvajs",
   "license": "MIT",
   "devDependencies": {
-    "@actions/core": "1.10.1",
+    "@actions/core": "1.11.1",
     "@actions/github": "6.0.0",
     "@eslint/eslintrc": "3.1.0",
-    "@eslint/js": "9.4.0",
-    "@graphql-codegen/add": "5.0.2",
-    "@graphql-codegen/cli": "5.0.2",
+    "@eslint/js": "9.14.0",
+    "@graphql-codegen/add": "5.0.3",
+    "@graphql-codegen/cli": "5.0.3",
     "@graphql-codegen/near-operation-file-preset": "3.0.0",
-    "@graphql-codegen/typescript": "4.0.7",
-    "@graphql-codegen/typescript-operations": "4.2.1",
-    "@octokit/action": "6.0.7",
-    "@octokit/graphql": "7.0.2",
-    "@octokit/graphql-schema": "14.58.0",
-    "@octokit/plugin-paginate-graphql": "4.0.1",
-    "@semantic-release/commit-analyzer": "12.0.0",
+    "@graphql-codegen/typescript": "4.1.1",
+    "@graphql-codegen/typescript-operations": "4.3.1",
+    "@octokit/action": "7.0.0",
+    "@octokit/graphql": "8.1.1",
+    "@octokit/graphql-schema": "15.25.0",
+    "@octokit/plugin-paginate-graphql": "5.2.4",
+    "@semantic-release/commit-analyzer": "13.0.0",
     "@semantic-release/git": "10.0.1",
-    "@semantic-release/release-notes-generator": "13.0.0",
-    "@stylistic/eslint-plugin": "1.8.1",
-    "@swc/core": "1.5.27",
-    "@swc/jest": "0.2.36",
-    "@types/jest": "29.5.12",
-    "@types/node": "20.12.6",
+    "@semantic-release/release-notes-generator": "14.0.1",
+    "@stylistic/eslint-plugin": "2.10.1",
+    "@swc/core": "1.9.1",
+    "@swc/jest": "0.2.37",
+    "@types/jest": "29.5.14",
+    "@types/node": "22.9.0",
     "chalk": "5.3.0",
     "codeowners": "5.1.1",
-    "dayjs": "1.11.11",
-    "eslint": "9.4.0",
-    "eslint-config-prettier": "9.1.0",
-    "graphql": "16.8.1",
-    "husky": "9.0.11",
+    "dayjs": "1.11.13",
+    "eslint": "9.14.0",
+    "graphql": "16.9.0",
+    "husky": "9.1.6",
     "jest": "29.7.0",
-    "lint-staged": "15.2.5",
-    "minimatch": "9.0.4",
+    "lint-staged": "15.2.10",
+    "minimatch": "10.0.1",
     "node-fetch": "3.3.2",
-    "prettier": "3.3.1",
-    "semantic-release": "23.1.1",
+    "prettier": "3.3.3",
+    "semantic-release": "24.2.0",
     "semantic-release-major-tag": "0.3.2",
-    "simple-git": "3.24.0",
-    "ts-node": "10.9.2",
-    "tsup": "8.0.2",
-    "typescript": "5.4.5",
-    "typescript-eslint": "7.12.0"
+    "simple-git": "3.27.0",
+    "ts-node-dev": "2.0.0",
+    "tsup": "8.3.5",
+    "typescript": "5.6.3",
+    "typescript-eslint": "8.13.0"
   }
 }
diff --git a/src/base-graphql-types.ts b/src/base-graphql-types.ts
index a3a6600..68d3777 100644
--- a/src/base-graphql-types.ts
+++ b/src/base-graphql-types.ts
@@ -1,5 +1,5 @@
-import type { Maybe } from '@octokit/graphql-schema/schema.js'
+import type { Maybe } from '@octokit/graphql-schema'
 
-export * from '@octokit/graphql-schema/schema.js'
+export * from '@octokit/graphql-schema'
 
 export type InputMaybe = Maybe
diff --git a/src/get-head-diff-since-review.ts b/src/get-head-diff-since-review.ts
index 735e101..d7ec68b 100644
--- a/src/get-head-diff-since-review.ts
+++ b/src/get-head-diff-since-review.ts
@@ -36,7 +36,7 @@ export const getHeadDiffSinceReview = async ({
   // find if files from intersectionFiles changed between head and review associated commit relatively to base branch
   await Promise.all(
     intersectionFiles.map(async file => {
-      const fileRenameMatch = file.match(fileRenameRegex)
+      const fileRenameMatch = fileRenameRegex.exec(file)
 
       if (fileRenameMatch) {
         const path1 = normalize(
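Aside (illustrative, not part of the diff): for a regex without the g flag, regex.exec(str) and str.match(regex) return the same match array, so the change in get-head-diff-since-review.ts is behavior-preserving and simply prefers the RegExp-owned method, the form suggested by lint rules such as @typescript-eslint/prefer-regexp-exec. A small illustration with a hypothetical rename pattern, since fileRenameRegex itself is defined outside this hunk:

// Hypothetical pattern; the real fileRenameRegex lives elsewhere in the file.
const renamePattern = /^(?<prefix>.*)\{(?<from>.*) => (?<to>.*)\}(?<suffix>.*)$/

const file = 'src/{old-name.ts => new-name.ts}'

// Equivalent results for a non-global regex:
const viaExec = renamePattern.exec(file)
const viaMatch = file.match(renamePattern)

console.log(viaExec?.groups?.to) // "new-name.ts"
console.log(viaMatch?.groups?.to) // "new-name.ts"
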
diff --git a/src/get-octokit.ts b/src/get-octokit.ts
index a7939b1..61d4b96 100644
--- a/src/get-octokit.ts
+++ b/src/get-octokit.ts
@@ -1,8 +1,8 @@
 import { Octokit } from '@octokit/action'
-import { paginateGraphql } from '@octokit/plugin-paginate-graphql'
+import { paginateGraphQL } from '@octokit/plugin-paginate-graphql'
 import fetch from 'node-fetch'
 
 export const getOctokit = ({ ghToken }: { ghToken: string }) => {
-  const OctokitWithPlugins = Octokit.plugin(paginateGraphql)
+  const OctokitWithPlugins = Octokit.plugin(paginateGraphQL)
   return new OctokitWithPlugins({ auth: ghToken, request: { fetch } })
 }
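
Aside (illustrative, not part of the diff): a minimal sketch of how the updated getOctokit helper is consumed. The paginateGraphQL plugin adds octokit.graphql.paginate, which keeps requesting pages as long as the query accepts a $cursor variable and selects pageInfo { hasNextPage endCursor } on the paginated connection, as the removed getPrData query earlier in the diff does. The query name, repository coordinates, token source, and import specifier below are assumptions for the sketch:

// Adjust the specifier to the project's import style.
import { getOctokit } from './get-octokit.ts'

// Illustrative query; any query works as long as it takes a $cursor variable
// and selects pageInfo { hasNextPage endCursor } on the paginated connection.
const listReviewStatesQuery = /* GraphQL */ `
  query listReviewStates($owner: String!, $repo: String!, $number: Int!, $cursor: String) {
    repository(owner: $owner, name: $repo) {
      pullRequest(number: $number) {
        reviews(first: 100, after: $cursor) {
          nodes {
            state
          }
          pageInfo {
            hasNextPage
            endCursor
          }
        }
      }
    }
  }
`

const octokit = getOctokit({ ghToken: process.env.GITHUB_TOKEN ?? '' })

// All pages are fetched and merged into a single response object.
const response = await octokit.graphql.paginate(listReviewStatesQuery, {
  owner: 'octocat',
  repo: 'hello-world',
  number: 1,
})

console.log(response)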