diff --git a/action.yml b/action.yml index 80cc50c2..f9688eab 100644 --- a/action.yml +++ b/action.yml @@ -104,7 +104,7 @@ outputs: description: 'The URL for uploading assets to the release.' runs: using: 'node12' - main: 'lib/Main.js' + main: 'dist/index.js' branding: icon: 'tag' color: 'gray-dark' diff --git a/dist/index.js b/dist/index.js new file mode 100644 index 00000000..3b7ffbf6 --- /dev/null +++ b/dist/index.js @@ -0,0 +1,10944 @@ +require('./sourcemap-register.js');/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ + +/***/ 3335: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Action = void 0; +const GithubError_1 = __nccwpck_require__(7433); +class Action { + constructor(inputs, outputs, releases, uploader, artifactDestroyer) { + this.inputs = inputs; + this.outputs = outputs; + this.releases = releases; + this.uploader = uploader; + this.artifactDestroyer = artifactDestroyer; + } + perform() { + return __awaiter(this, void 0, void 0, function* () { + const releaseResponse = yield this.createOrUpdateRelease(); + const releaseData = releaseResponse.data; + const releaseId = releaseData.id; + const uploadUrl = releaseData.upload_url; + if (this.inputs.removeArtifacts) { + yield this.artifactDestroyer.destroyArtifacts(releaseId); + } + const artifacts = this.inputs.artifacts; + if (artifacts.length > 0) { + yield this.uploader.uploadArtifacts(artifacts, releaseId, uploadUrl); + } + this.outputs.applyReleaseData(releaseData); + }); + } + createOrUpdateRelease() { + return __awaiter(this, void 0, void 0, function* () { + if (this.inputs.allowUpdates) { + let getResponse; + try { + getResponse = yield this.releases.getByTag(this.inputs.tag); + } + catch (error) { + return yield this.checkForMissingReleaseError(error); + } + return yield this.updateRelease(getResponse.data.id); + } + else { + return yield this.createRelease(); + } + }); + } + checkForMissingReleaseError(error) { + return __awaiter(this, void 0, void 0, function* () { + if (Action.noPublishedRelease(error)) { + return yield this.updateDraftOrCreateRelease(); + } + else { + throw error; + } + }); + } + updateRelease(id) { + return __awaiter(this, void 0, void 0, function* () { + return yield this.releases.update(id, this.inputs.tag, this.inputs.updatedReleaseBody, this.inputs.commit, this.inputs.discussionCategory, this.inputs.draft, this.inputs.updatedReleaseName, this.inputs.updatedPrerelease); + }); + } + static noPublishedRelease(error) { + const githubError = new GithubError_1.GithubError(error); + return githubError.status == 404; + } + updateDraftOrCreateRelease() { + return __awaiter(this, void 0, void 0, function* () { + const draftReleaseId = yield this.findMatchingDraftReleaseId(); + if (draftReleaseId) { + return yield 
this.updateRelease(draftReleaseId); + } + else { + return yield this.createRelease(); + } + }); + } + findMatchingDraftReleaseId() { + return __awaiter(this, void 0, void 0, function* () { + const tag = this.inputs.tag; + const response = yield this.releases.listReleases(); + const releases = response.data; + const draftRelease = releases.find(release => release.draft && release.tag_name == tag); + return draftRelease === null || draftRelease === void 0 ? void 0 : draftRelease.id; + }); + } + createRelease() { + return __awaiter(this, void 0, void 0, function* () { + return yield this.releases.create(this.inputs.tag, this.inputs.createdReleaseBody, this.inputs.commit, this.inputs.discussionCategory, this.inputs.draft, this.inputs.createdReleaseName, this.inputs.createdPrerelease); + }); + } +} +exports.Action = Action; + + +/***/ }), + +/***/ 8568: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Artifact = void 0; +const path_1 = __nccwpck_require__(1017); +const fs_1 = __nccwpck_require__(7147); +class Artifact { + constructor(path, contentType = "raw") { + this.path = path; + this.name = (0, path_1.basename)(path); + this.contentType = contentType; + } + get contentLength() { + return (0, fs_1.statSync)(this.path).size; + } + readFile() { + return (0, fs_1.readFileSync)(this.path); + } +} +exports.Artifact = Artifact; + + +/***/ }), + +/***/ 1770: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubArtifactDestroyer = void 0; +const core = __importStar(__nccwpck_require__(2559)); +class GithubArtifactDestroyer { + constructor(releases) { + this.releases = releases; + } + destroyArtifacts(releaseId) { + return __awaiter(this, void 0, void 0, function* () { + const releaseAssets = yield this.releases.listArtifactsForRelease(releaseId); + for (const artifact of releaseAssets) { + const asset = artifact; + core.debug(`Deleting existing artifact ${artifact.name}...`); + yield this.releases.deleteArtifact(asset.id); + } + }); + } +} +exports.GithubArtifactDestroyer = GithubArtifactDestroyer; + + +/***/ }), + +/***/ 8924: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileArtifactGlobber = void 0; +const core = __importStar(__nccwpck_require__(2559)); +const Globber_1 = __nccwpck_require__(8259); +const Artifact_1 = __nccwpck_require__(8568); +const untildify_1 = __importDefault(__nccwpck_require__(6732)); +const ArtifactPathValidator_1 = __nccwpck_require__(7818); +class FileArtifactGlobber { + constructor(globber = new Globber_1.FileGlobber()) { + this.globber = globber; + } + globArtifactString(artifact, contentType, errorsFailBuild) { + const split = /[,\n]/; + return artifact.split(split) + .map(path => path.trimStart()) + .map(path => FileArtifactGlobber.expandPath(path)) + .map(pattern => this.globPattern(pattern, errorsFailBuild)) + .map((globResult) => FileArtifactGlobber.validatePattern(errorsFailBuild, globResult[1], globResult[0])) + .reduce((accumulated, current) => accumulated.concat(current)) + .map(path => new Artifact_1.Artifact(path, contentType)); + } + globPattern(pattern, errorsFailBuild) { + const paths = this.globber.glob(pattern); + if (paths.length == 0) { + if (errorsFailBuild) { + FileArtifactGlobber.throwGlobError(pattern); + } + else { + FileArtifactGlobber.reportGlobWarning(pattern); + } + } + return [pattern, paths]; + } + static validatePattern(errorsFailBuild, paths, pattern) { + const validator = new ArtifactPathValidator_1.ArtifactPathValidator(errorsFailBuild, paths, pattern); + return validator.validate(); + } + static reportGlobWarning(pattern) { + core.warning(`Artifact pattern :${pattern} did not match any files`); + } + static throwGlobError(pattern) { + throw Error(`Artifact pattern :${pattern} did not match any files`); + } + static expandPath(path) { + return (0, untildify_1.default)(path); + } +} +exports.FileArtifactGlobber = FileArtifactGlobber; + + +/***/ }), + +/***/ 7818: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.ArtifactPathValidator = void 0; +const core = __importStar(__nccwpck_require__(2559)); +const fs_1 = __nccwpck_require__(7147); +class ArtifactPathValidator { + constructor(errorsFailBuild, paths, pattern) { + this.paths = paths; + this.pattern = pattern; + this.errorsFailBuild = errorsFailBuild; + } + validate() { + this.verifyPathsNotEmpty(); + return this.paths.filter((path) => this.verifyNotDirectory(path)); + } + verifyPathsNotEmpty() { + if (this.paths.length == 0) { + const message = `Artifact pattern:${this.pattern} did not match any files`; + this.reportError(message); + } + } + verifyNotDirectory(path) { + const isDir = (0, fs_1.statSync)(path).isDirectory(); + if (isDir) { + const message = `Artifact is a directory:${path}. Directories can not be uploaded to a release.`; + this.reportError(message); + } + return !isDir; + } + reportError(message) { + if (this.errorsFailBuild) { + throw Error(message); + } + else { + core.warning(message); + } + } +} +exports.ArtifactPathValidator = ArtifactPathValidator; + + +/***/ }), + +/***/ 972: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubArtifactUploader = void 0; +const core = __importStar(__nccwpck_require__(2559)); +class GithubArtifactUploader { + constructor(releases, replacesExistingArtifacts = true, throwsUploadErrors = false) { + this.releases = releases; + this.replacesExistingArtifacts = replacesExistingArtifacts; + this.throwsUploadErrors = throwsUploadErrors; + } + uploadArtifacts(artifacts, releaseId, uploadUrl) { + return __awaiter(this, void 0, void 0, function* () { + if (this.replacesExistingArtifacts) { + yield this.deleteUpdatedArtifacts(artifacts, releaseId); + } + for (const artifact of artifacts) { + yield this.uploadArtifact(artifact, releaseId, uploadUrl); + } + }); + } + uploadArtifact(artifact, releaseId, uploadUrl, retry = 3) { + return __awaiter(this, void 0, void 0, function* () { + try { + core.debug(`Uploading artifact ${artifact.name}...`); + yield this.releases.uploadArtifact(uploadUrl, artifact.contentLength, artifact.contentType, artifact.readFile(), artifact.name, releaseId); + } + catch (error) { + if (error.status >= 500 && retry > 0) { + core.warning(`Failed to upload artifact ${artifact.name}. ${error.message}. Retrying...`); + yield this.uploadArtifact(artifact, releaseId, uploadUrl, retry - 1); + } + else { + if (this.throwsUploadErrors) { + throw Error(`Failed to upload artifact ${artifact.name}. ${error.message}.`); + } + else { + core.warning(`Failed to upload artifact ${artifact.name}. ${error.message}.`); + } + } + } + }); + } + deleteUpdatedArtifacts(artifacts, releaseId) { + return __awaiter(this, void 0, void 0, function* () { + const releaseAssets = yield this.releases.listArtifactsForRelease(releaseId); + const assetByName = {}; + releaseAssets.forEach(asset => { + assetByName[asset.name] = asset; + }); + for (const artifact of artifacts) { + const asset = assetByName[artifact.name]; + if (asset) { + core.debug(`Deleting existing artifact ${artifact.name}...`); + yield this.releases.deleteArtifact(asset.id); + } + } + }); + } +} +exports.GithubArtifactUploader = GithubArtifactUploader; + + +/***/ }), + +/***/ 7433: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubError = void 0; +const GithubErrorDetail_1 = __nccwpck_require__(649); +class GithubError { + constructor(error) { + this.error = error; + this.githubErrors = this.generateGithubErrors(); + } + generateGithubErrors() { + const errors = this.error.errors; + if (errors instanceof Array) { + return errors.map((err) => new GithubErrorDetail_1.GithubErrorDetail(err)); + } + else { + return []; + } + } + get status() { + return this.error.status; + } + hasErrorWithCode(code) { + return this.githubErrors.some((err) => err.code == code); + } + toString() { + const message = this.error.message; + const errors = this.githubErrors; + const status = this.status; + if (errors.length > 0) { + return `Error ${status}: ${message}\nErrors:\n${this.errorBulletedList(errors)}`; + } + else { + return `Error ${status}: ${message}`; + } + } + errorBulletedList(errors) { + return errors.map((err) => `- ${err}`).join("\n"); + } +} +exports.GithubError = GithubError; + + +/***/ }), + +/***/ 649: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + 
+Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubErrorDetail = void 0; +class GithubErrorDetail { + constructor(error) { + this.error = error; + } + get code() { + return this.error.code; + } + toString() { + const code = this.error.code; + switch (code) { + case 'missing': + return this.missingResourceMessage(); + case 'missing_field': + return this.missingFieldMessage(); + case 'invalid': + return this.invalidFieldMessage(); + case 'already_exists': + return this.resourceAlreadyExists(); + default: + return this.customErrorMessage(); + } + } + customErrorMessage() { + const message = this.error.message; + const documentation = this.error.documentation_url; + let documentationMessage; + if (documentation) { + documentationMessage = `\nPlease see ${documentation}.`; + } + else { + documentationMessage = ""; + } + return `${message}${documentationMessage}`; + } + invalidFieldMessage() { + const resource = this.error.resource; + const field = this.error.field; + return `The ${field} field on ${resource} is an invalid format.`; + } + missingResourceMessage() { + const resource = this.error.resource; + return `${resource} does not exist.`; + } + missingFieldMessage() { + const resource = this.error.resource; + const field = this.error.field; + return `The ${field} field on ${resource} is missing.`; + } + resourceAlreadyExists() { + const resource = this.error.resource; + return `${resource} already exists.`; + } +} +exports.GithubErrorDetail = GithubErrorDetail; + + +/***/ }), + +/***/ 8259: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.FileGlobber = void 0; +const glob_1 = __nccwpck_require__(6691); +class FileGlobber { + glob(pattern) { + return new glob_1.GlobSync(pattern, { mark: true }).found; + } +} +exports.FileGlobber = FileGlobber; + + +/***/ }), + +/***/ 8218: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CoreInputs = void 0; +const core = __importStar(__nccwpck_require__(2559)); +const fs_1 = __nccwpck_require__(7147); +class CoreInputs { + constructor(artifactGlobber, context) { + this.artifactGlobber = artifactGlobber; + this.context = context; + } + get allowUpdates() { + const allow = core.getInput('allowUpdates'); + return allow == 'true'; + } + get artifacts() { + let artifacts = core.getInput('artifacts'); + if (!artifacts) { + artifacts = core.getInput('artifact'); + } + if (artifacts) { + let contentType = core.getInput('artifactContentType'); + if (!contentType) { + contentType = 'raw'; + } + return this.artifactGlobber + .globArtifactString(artifacts, contentType, this.artifactErrorsFailBuild); + } + return []; + } + get artifactErrorsFailBuild() { + const allow = core.getInput('artifactErrorsFailBuild'); + return allow == 'true'; + } + get createdReleaseBody() { + if (CoreInputs.omitBody) + return undefined; + return this.body; + } + static get omitBody() { + return core.getInput('omitBody') == 'true'; + } + get body() { + const body = core.getInput('body'); + if (body) { + return body; + } + const bodyFile = core.getInput('bodyFile'); + if (bodyFile) { + return this.stringFromFile(bodyFile); + } + return ''; + } + get commit() { + return core.getInput('commit'); + } + get createdReleaseName() { + if (CoreInputs.omitName) + return undefined; + return this.name; + } + get discussionCategory() { + const category = core.getInput('discussionCategory'); + if (category) { + return category; + } + return undefined; + } + static get omitName() { + return core.getInput('omitName') == 'true'; + } + get name() { + const name = core.getInput('name'); + if (name) { + return name; + } + return this.tag; + } + get draft() { + const draft = core.getInput('draft'); + return draft == 'true'; + } + get owner() { + let owner = core.getInput('owner'); + if (owner) { + return owner; + } + return this.context.repo.owner; + } + get createdPrerelease() { + const preRelease = core.getInput('prerelease'); + return preRelease == 'true'; + } + static get omitPrereleaseDuringUpdate() { + return core.getInput('omitPrereleaseDuringUpdate') == 'true'; + } + get updatedPrerelease() { + if (CoreInputs.omitPrereleaseDuringUpdate) + return undefined; + return this.createdPrerelease; + } + get removeArtifacts() { + const removes = core.getInput('removeArtifacts'); + return removes == 'true'; + } + get replacesArtifacts() { + const replaces = core.getInput('replacesArtifacts'); + return replaces == 'true'; + } + get repo() { + let repo = core.getInput('repo'); + if (repo) { + return repo; + } + return this.context.repo.repo; + } + get tag() { + const tag = core.getInput('tag'); + if (tag) { + return tag; + } + const ref = this.context.ref; + const tagPath = "refs/tags/"; + if (ref && ref.startsWith(tagPath)) { + return ref.substr(tagPath.length, ref.length); + } + throw Error("No tag found in ref or input!"); + } + get token() { + return core.getInput('token', { required: true }); + } + get 
updatedReleaseBody() { + if (CoreInputs.omitBody || CoreInputs.omitBodyDuringUpdate) + return undefined; + return this.body; + } + static get omitBodyDuringUpdate() { + return core.getInput('omitBodyDuringUpdate') == 'true'; + } + get updatedReleaseName() { + if (CoreInputs.omitName || CoreInputs.omitNameDuringUpdate) + return undefined; + return this.name; + } + static get omitNameDuringUpdate() { + return core.getInput('omitNameDuringUpdate') == 'true'; + } + stringFromFile(path) { + return (0, fs_1.readFileSync)(path, 'utf-8'); + } +} +exports.CoreInputs = CoreInputs; + + +/***/ }), + +/***/ 6650: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CoreOutputs = void 0; +const core = __importStar(__nccwpck_require__(2559)); +class CoreOutputs { + applyReleaseData(releaseData) { + core.setOutput('id', releaseData.id); + core.setOutput('html_url', releaseData.html_url); + core.setOutput('upload_url', releaseData.upload_url); + } +} +exports.CoreOutputs = CoreOutputs; + + +/***/ }), + +/***/ 184: +/***/ (function(__unused_webpack_module, exports) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.GithubReleases = void 0; +class GithubReleases { + constructor(inputs, git) { + this.inputs = inputs; + this.git = git; + } + create(tag, body, commitHash, discussionCategory, draft, name, prerelease) { + return __awaiter(this, void 0, void 0, function* () { + // noinspection TypeScriptValidateJSTypes + return this.git.rest.repos.createRelease({ + body: body, + name: name, + discussion_category_name: discussionCategory, + draft: draft, + owner: this.inputs.owner, + prerelease: prerelease, + repo: this.inputs.repo, + target_commitish: commitHash, + tag_name: tag + }); + }); + } + deleteArtifact(assetId) { + return __awaiter(this, void 0, void 0, function* () { + return this.git.rest.repos.deleteReleaseAsset({ + asset_id: assetId, + owner: this.inputs.owner, + repo: this.inputs.repo + }); + }); + } + getByTag(tag) { + return __awaiter(this, void 0, void 0, function* () { + return this.git.rest.repos.getReleaseByTag({ + owner: this.inputs.owner, + repo: this.inputs.repo, + tag: tag + }); + }); + } + listArtifactsForRelease(releaseId) { + return __awaiter(this, void 0, void 0, function* () { + return this.git.paginate(this.git.rest.repos.listReleaseAssets, { + owner: this.inputs.owner, + release_id: releaseId, + repo: this.inputs.repo + }); + }); + } + listReleases() { + return __awaiter(this, void 0, void 0, function* () { + return this.git.rest.repos.listReleases({ + owner: this.inputs.owner, + repo: this.inputs.repo + }); + }); + } + update(id, tag, body, commitHash, discussionCategory, draft, name, prerelease) { + return __awaiter(this, void 0, void 0, function* () { + // noinspection TypeScriptValidateJSTypes + return this.git.rest.repos.updateRelease({ + release_id: id, + body: body, + name: name, + discussion_category_name: discussionCategory, + draft: draft, + owner: this.inputs.owner, + prerelease: prerelease, + repo: this.inputs.repo, + target_commitish: commitHash, + tag_name: tag + }); + }); + } + uploadArtifact(assetUrl, contentLength, contentType, file, name, releaseId) { + return __awaiter(this, void 0, void 0, function* () { + return this.git.rest.repos.uploadReleaseAsset({ + url: assetUrl, + headers: { + "content-length": contentLength, + "content-type": contentType + }, + data: file, + name: name, + owner: this.inputs.owner, + release_id: releaseId, + repo: this.inputs.repo + }); + }); + } +} +exports.GithubReleases = GithubReleases; + + +/***/ }), + +/***/ 1769: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const github = __importStar(__nccwpck_require__(1089)); +const core = __importStar(__nccwpck_require__(2559)); +const Inputs_1 = __nccwpck_require__(8218); +const Releases_1 = __nccwpck_require__(184); +const Action_1 = __nccwpck_require__(3335); +const ArtifactUploader_1 = __nccwpck_require__(972); +const ArtifactGlobber_1 = __nccwpck_require__(8924); +const GithubError_1 = __nccwpck_require__(7433); +const Outputs_1 = __nccwpck_require__(6650); +const ArtifactDestroyer_1 = __nccwpck_require__(1770); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const action = createAction(); + yield action.perform(); + } + catch (error) { + const githubError = new GithubError_1.GithubError(error); + core.setFailed(githubError.toString()); + } + }); +} +function createAction() { + const token = core.getInput('token'); + const context = github.context; + const git = github.getOctokit(token); + const globber = new ArtifactGlobber_1.FileArtifactGlobber(); + const inputs = new Inputs_1.CoreInputs(globber, context); + const outputs = new Outputs_1.CoreOutputs(); + const releases = new Releases_1.GithubReleases(inputs, git); + const uploader = new ArtifactUploader_1.GithubArtifactUploader(releases, inputs.replacesArtifacts, inputs.artifactErrorsFailBuild); + const artifactDestroyer = new ArtifactDestroyer_1.GithubArtifactDestroyer(releases); + return new Action_1.Action(inputs, outputs, releases, uploader, artifactDestroyer); +} +run(); + + +/***/ }), + +/***/ 9117: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issue = exports.issueCommand = void 0; +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(9046); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 2559: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; +const command_1 = __nccwpck_require__(9117); +const file_command_1 = __nccwpck_require__(2028); +const utils_1 = __nccwpck_require__(9046); +const os = __importStar(__nccwpck_require__(2037)); +const path = __importStar(__nccwpck_require__(1017)); +const oidc_utils_1 = __nccwpck_require__(2853); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; + file_command_1.issueCommand('ENV', commandValue); + } + else { + command_1.issueCommand('set-env', { name }, convertedVal); + } +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. + * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. + * Returns an empty string if the value is not defined. 
+ * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Gets the values of an multiline input. Each value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string[] + * + */ +function getMultilineInput(name, options) { + const inputs = getInput(name, options) + .split('\n') + .filter(x => x !== ''); + return inputs; +} +exports.getMultilineInput = getMultilineInput; +/** + * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. + * Support boolean input list: `true | True | TRUE | false | False | FALSE` . + * The return value is also in boolean type. + * ref: https://yaml.org/spec/1.2/spec.html#id2804923 + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns boolean + */ +function getBooleanInput(name, options) { + const trueValue = ['true', 'True', 'TRUE']; + const falseValue = ['false', 'False', 'FALSE']; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); +} +exports.getBooleanInput = getBooleanInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + process.stdout.write(os.EOL); + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. 
Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function error(message, properties = {}) { + command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds a warning issue + * @param message warning issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function warning(message, properties = {}) { + command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Adds a notice issue + * @param message notice issue message. Errors will be converted to string via toString() + * @param properties optional properties to add to the annotation. + */ +function notice(message, properties = {}) { + command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message); +} +exports.notice = notice; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +function getIDToken(aud) { + return __awaiter(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); +} +exports.getIDToken = getIDToken; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 2028: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +// For internal use, subject to change. +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.issueCommand = void 0; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__nccwpck_require__(7147)); +const os = __importStar(__nccwpck_require__(2037)); +const utils_1 = __nccwpck_require__(9046); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map + +/***/ }), + +/***/ 2853: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.OidcClient = void 0; +const http_client_1 = __nccwpck_require__(338); +const auth_1 = __nccwpck_require__(3642); +const core_1 = __nccwpck_require__(2559); +class OidcClient { + static createHttpClient(allowRetry = true, maxRetry = 10) { + const requestOptions = { + allowRetries: allowRetry, + maxRetries: maxRetry + }; + return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); + } + static getRequestToken() { + const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); + } + return token; + } + static getIDTokenUrl() { + const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); + } + return runtimeUrl; + } + static getCall(id_token_url) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const httpclient = OidcClient.createHttpClient(); + const res = yield httpclient + .getJson(id_token_url) + .catch(error => { + throw new Error(`Failed to get ID Token. \n + Error Code : ${error.statusCode}\n + Error Message: ${error.result.message}`); + }); + const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; + if (!id_token) { + throw new Error('Response json body do not have ID Token field'); + } + return id_token; + }); + } + static getIDToken(audience) { + return __awaiter(this, void 0, void 0, function* () { + try { + // New ID Token is requested from action service + let id_token_url = OidcClient.getIDTokenUrl(); + if (audience) { + const encodedAudience = encodeURIComponent(audience); + id_token_url = `${id_token_url}&audience=${encodedAudience}`; + } + core_1.debug(`ID token url is ${id_token_url}`); + const id_token = yield OidcClient.getCall(id_token_url); + core_1.setSecret(id_token); + return id_token; + } + catch (error) { + throw new Error(`Error message: ${error.message}`); + } + }); + } +} +exports.OidcClient = OidcClient; +//# sourceMappingURL=oidc-utils.js.map + +/***/ }), + +/***/ 9046: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.toCommandProperties = exports.toCommandValue = void 0; +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +/** + * + * @param annotationProperties + * @returns The command properties to send with the actual annotation command + * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 + */ +function toCommandProperties(annotationProperties) { + if (!Object.keys(annotationProperties).length) { + return {}; + } + return { + title: annotationProperties.title, + file: annotationProperties.file, + line: 
annotationProperties.startLine, + endLine: annotationProperties.endLine, + col: annotationProperties.startColumn, + endColumn: annotationProperties.endColumn + }; +} +exports.toCommandProperties = toCommandProperties; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 7916: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __nccwpck_require__(7147); +const os_1 = __nccwpck_require__(2037); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } +} +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 1089: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(7916)); +const utils_1 = __nccwpck_require__(2313); +exports.context = new Context.Context(); +/** + * Returns a hydrated octokit ready to use for GitHub Actions + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokit(token, options) { + return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +} +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map + +/***/ }), + +/***/ 1994: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__nccwpck_require__(338)); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 2313: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__nccwpck_require__(7916)); +const Utils = __importStar(__nccwpck_require__(1994)); +// octokit + plugins +const core_1 = __nccwpck_require__(202); +const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3543); +const plugin_paginate_rest_1 = __nccwpck_require__(3654); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); +/** + * Convience function to correctly format Octokit Options to pass into the constructor. + * + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set + */ +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 3642: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + + Buffer.from(this.username + ':' + this.password).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Bearer ' + this.token; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; + + +/***/ }), + +/***/ 
338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +const http = __nccwpck_require__(3685); +const https = __nccwpck_require__(5687); +const pm = __nccwpck_require__(8994); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? 
proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a 
typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. 
+ return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. 
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __nccwpck_require__(5683); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + ...((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + }), + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), + +/***/ 8994: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; + } + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = new URL(proxyVar); + } + return proxyUrl; +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let 
upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; + + +/***/ }), + +/***/ 8399: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 202: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var universalUserAgent = __nccwpck_require__(4378); +var beforeAfterHook = __nccwpck_require__(7895); +var request = __nccwpck_require__(1027); +var graphql = __nccwpck_require__(9250); +var authToken = __nccwpck_require__(8399); + +function _objectWithoutPropertiesLoose(source, excluded) { + if (source == null) return {}; + var target = {}; + var sourceKeys = Object.keys(source); + var key, i; + + for (i = 0; i < sourceKeys.length; i++) { + key = sourceKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + target[key] = source[key]; + } + + return target; +} + +function _objectWithoutProperties(source, excluded) { + if (source == null) return {}; + + var target = _objectWithoutPropertiesLoose(source, excluded); + + var key, i; + + if (Object.getOwnPropertySymbols) { + var sourceSymbolKeys = Object.getOwnPropertySymbols(source); + + for (i = 0; i < sourceSymbolKeys.length; i++) { + key = sourceSymbolKeys[i]; + if (excluded.indexOf(key) >= 0) continue; + if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; + target[key] = source[key]; + } + } + + return target; +} + +const VERSION = "3.5.1"; + +const _excluded = ["authStrategy"]; +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} 
${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { + authStrategy + } = options, + otherOptions = _objectWithoutProperties(options, _excluded); + + const auth = authStrategy(Object.assign({ + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ */ + + + static plugin(...newPlugins) { + var _a; + + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 8114: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var isPlainObject = __nccwpck_require__(4202); +var universalUserAgent = __nccwpck_require__(4378); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging + + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? 
{ + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.11"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. + +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 9250: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +var request = __nccwpck_require__(3882); +var universalUserAgent = __nccwpck_require__(4378); + +const VERSION = "4.6.0"; + +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + Object.assign(this, { + headers: response.headers + }); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + +} + +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request, query, options) { + if (typeof query === "string" && options && "query" in options) { + return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`)); + } + + const parsedOptions = typeof query === "string" ? 
Object.assign({ + query + }, options) : query; + const requestOptions = Object.keys(parsedOptions).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + + if (!result.variables) { + result.variables = {}; + } + + result.variables[key] = parsedOptions[key]; + return result; + }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix + // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451 + + const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl; + + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + + return request(requestOptions).then(response => { + if (response.data.errors) { + const headers = {}; + + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + + throw new GraphqlError(requestOptions, { + headers, + data: response.data + }); + } + + return response.data.data; + }); +} + +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); + + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} + +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 8415: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var deprecation = __nccwpck_require__(5887); +var once = _interopDefault(__nccwpck_require__(447)); + +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3882: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var endpoint = __nccwpck_require__(8114); +var universalUserAgent = __nccwpck_require__(4378); +var isPlainObject = __nccwpck_require__(4202); +var nodeFetch = _interopDefault(__nccwpck_require__(1307)); +var requestError = __nccwpck_require__(8415); + +const VERSION = "5.4.14"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests + + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } + + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); + + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format + + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } + + throw error; + }); + } + + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } + + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# 
sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3654: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const VERSION = "2.13.6"; + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + // endpoints can respond with 204 if repository is empty + if (!response.data) { + return _objectSpread2(_objectSpread2({}, response), {}, { + data: [] + }); + } + + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. 
+ + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + async next() { + if (!url) return { + done: true + }; + + try { + const response = await requestMethod({ + method, + url, + headers + }); + const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + + url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: normalizedResponse + }; + } catch (error) { + if (error.status !== 409) throw error; + url = ""; + return { + value: { + status: 200, + headers: {}, + data: [] + } + }; + } + } + + }) + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} + +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } + + let earlyExit = false; + + function done() { + earlyExit = true; + } + + results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); + + if (earlyExit) { + return results; + } + + return gather(octokit, results, iterator, mapFn); + }); +} + +const composePaginateRest = Object.assign(paginate, { + iterator +}); + +const paginatingEndpoints = ["GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET 
/teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; + +function isPaginatingEndpoint(arg) { + if (typeof arg === "string") { + return paginatingEndpoints.includes(arg); + } else { + return false; + } +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.composePaginateRest = composePaginateRest; +exports.isPaginatingEndpoint = isPaginatingEndpoint; +exports.paginateRest = paginateRest; +exports.paginatingEndpoints = paginatingEndpoints; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 3543: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + + if (enumerableOnly) { + symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + } + + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? 
arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + disableSelectedRepositoryGithubActionsOrganization: ["DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}"], + disableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], + enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], + getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], + getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], + 
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], + getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], + getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPendingDeploymentsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + getRepoPermissions: ["GET /repos/{owner}/{repo}/actions/permissions", {}, { + renamed: ["actions", "getGithubActionsPermissionsRepository"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getReviewsForRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], + listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelectedRepositoriesEnabledGithubActionsOrganization: ["GET /orgs/{org}/actions/permissions/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], + setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], + setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], + setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], + setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], + setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT 
/orgs/{org}/actions/permissions/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}"], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app"], + getBySlug: ["GET /apps/{app_slug}"], + getInstallation: ["GET /app/installations/{installation_id}"], + getOrgInstallation: ["GET /orgs/{org}/installation"], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation"], + 
getWebhookConfigForApp: ["GET /app/hook/config"], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories"], + listInstallations: ["GET /app/installations"], + listInstallationsForAuthenticatedUser: ["GET /user/installations"], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories"], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}"], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + scopeToken: ["POST /applications/{client_id}/token/scoped"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"], + updateWebhookConfigForApp: ["PATCH /app/hook/config"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], + getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], + getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs"], + createSuite: ["POST /repos/{owner}/{repo}/check-suites"], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations"], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs"], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest"], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences"], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] + }, + codeScanning: { + deleteAnalysis: ["DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}"], + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { + renamedParameters: { + alert_id: "alert_number" + } + }], + getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], + getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], + listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], + listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { + renamed: ["codeScanning", "listAlertInstances"] + }], + listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], + updateAlert: ["PATCH 
/repos/{owner}/{repo}/code-scanning/alerts/{alert_number}"], + uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }] + }, + emojis: { + get: ["GET /emojis"] + }, + enterpriseAdmin: { + disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], + getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], + getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], + listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], + setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], + setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], + setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], + 
getRestrictionsForYourPublicRepos: ["GET /user/interaction-limits", {}, { + renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] + }], + removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits"], + removeRestrictionsForYourPublicRepos: ["DELETE /user/interaction-limits", {}, { + renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] + }], + setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], + setRestrictionsForYourPublicRepos: ["PUT /user/interaction-limits", {}, { + renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] + } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH 
/repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"], + getOctocat: ["GET /octocat"], + getZen: ["GET /zen"], + root: ["GET /"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForUser: ["GET /user/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + getWebhookConfigForOrg: ["GET 
/orgs/{org}/hooks/{hook_id}/config"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations"], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], + updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] + }, + packages: { + deletePackageForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}"], + deletePackageForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}"], + deletePackageVersionForAuthenticatedUser: ["DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + deletePackageVersionForOrg: ["DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getAllPackageVersionsForAPackageOwnedByAnOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] + }], + getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions", {}, { + renamed: ["packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser"] + }], + getAllPackageVersionsForPackageOwnedByAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByOrg: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions"], + getAllPackageVersionsForPackageOwnedByUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions"], + getPackageForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}"], + getPackageForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}"], + getPackageForUser: ["GET /users/{username}/packages/{package_type}/{package_name}"], + getPackageVersionForAuthenticatedUser: ["GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForOrganization: ["GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + getPackageVersionForUser: ["GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}"], + restorePackageForAuthenticatedUser: ["POST 
/user/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}"], + restorePackageVersionForAuthenticatedUser: ["POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"], + restorePackageVersionForOrg: ["POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST 
/repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForRelease: ["POST /repos/{owner}/{repo}/releases/{release_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + 
}], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteLegacy: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }, { + deprecated: "octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy" + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: 
["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateEnvironment: ["PUT /repos/{owner}/{repo}/environments/{environment_name}"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteAnEnvironment: ["DELETE /repos/{owner}/{repo}/environments/{environment_name}"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}", {}, { + renamed: ["repos", "downloadZipballArchive"] + }], + downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], + downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getEnvironment: ["GET /repos/{owner}/{repo}/environments/{environment_name}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + getWebhookConfigForRepo: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/config"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET 
/repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: 
["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusCheckProtection"] + }], + updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics", { + mediaType: { + previews: ["mercy"] + } + }], + users: ["GET /search/users"] + }, + secretScanning: { + getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], + updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET 
/orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; + +const VERSION = "5.3.7"; + +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); + + if (!newMethods[scope]) { + newMethods[scope] = {}; + } + + const scopeMethods = newMethods[scope]; + + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } + + return newMethods; +} + +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ + + function withDecorations(...args) { + // @ts-ignore 
https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` + + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); + } + + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } + + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); + + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); + + if (!(alias in options)) { + options[alias] = options[name]; + } + + delete options[name]; + } + } + + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + + + return requestWithDefaults(...args); + } + + return Object.assign(withDecorations, requestWithDefaults); +} + +function restEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return { + rest: api + }; +} +restEndpointMethods.VERSION = VERSION; +function legacyRestEndpointMethods(octokit) { + const api = endpointsToMethods(octokit, Endpoints); + return _objectSpread2(_objectSpread2({}, api), {}, { + rest: api + }); +} +legacyRestEndpointMethods.VERSION = VERSION; + +exports.legacyRestEndpointMethods = legacyRestEndpointMethods; +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 6514: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var deprecation = __nccwpck_require__(5887); +var once = _interopDefault(__nccwpck_require__(447)); + +const logOnceCode = once(deprecation => console.warn(deprecation)); +const logOnceHeaders = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + let headers; + + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } // redact request credentials without mutating original request options + + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; // deprecations + + Object.defineProperty(this, "code", { + get() { + logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); + return headers || {}; + } + + }); + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 1027: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var endpoint = __nccwpck_require__(8114); +var universalUserAgent = __nccwpck_require__(4378); +var isPlainObject = __nccwpck_require__(4202); +var nodeFetch = _interopDefault(__nccwpck_require__(1307)); +var requestError = __nccwpck_require__(6514); + +const VERSION = "5.6.0"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + const log = requestOptions.request && requestOptions.request.log ? 
requestOptions.request.log : console; + + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, // `requestOptions.request.agent` type is incompatible + // see https://github.com/octokit/types.ts/pull/264 + requestOptions.request)).then(async response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`); + } + + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests + + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + + throw new requestError.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: undefined + }, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + + if (status >= 400) { + const data = await getResponseData(response); + const error = new requestError.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + + return getResponseData(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) throw error; + throw new requestError.RequestError(error.message, 500, { + request: requestOptions + }); + }); +} + +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); +} + +function toErrorMessage(data) { + if (typeof data === "string") return data; // istanbul ignore else - just in case + + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`; + } + + return data.message; + } // istanbul ignore next - just in case + + + return `Unknown error: ${JSON.stringify(data)}`; +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + 
endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 62: +/***/ ((module) => { + +"use strict"; + +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} + + +/***/ }), + +/***/ 7895: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var register = __nccwpck_require__(14) +var addHook = __nccwpck_require__(1898) +var removeHook = __nccwpck_require__(625) + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind +var bindable = bind.bind(bind) + +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef + + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} + +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} + } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook +} + +function HookCollection () { + var state = { + registry: {} + } + + var hook = register.bind(null, state) + bindApi(hook, state) + + return hook +} + +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() +} + +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() + +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection + + +/***/ }), + +/***/ 1898: +/***/ ((module) => { + +module.exports = addHook; + +function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + + if (kind === "before") { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; + } + + if (kind === "after") { + hook = function (method, options) { + var result; + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_; + return orig(result, options); + }) + .then(function () { + return result; + }); + }; + } + + if (kind === "error") { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options); + }); + }; + } + + state.registry[name].push({ + hook: hook, + orig: orig, + }); +} + + +/***/ }), + +/***/ 14: +/***/ ((module) => { + +module.exports = register; + +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + + if (!options) { + options = {}; + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options); + }, method)(); + } + + return Promise.resolve().then(function () { + if (!state.registry[name]) { + return method(options); + } + + return state.registry[name].reduce(function (method, registered) { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} + + +/***/ }), + +/***/ 625: +/***/ ((module) => { + +module.exports = removeHook; + +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } + + var index = state.registry[name] + .map(function (registered) { + return registered.orig; + }) + .indexOf(method); + + if (index === -1) { + return; + } + + state.registry[name].splice(index, 1); +} + + +/***/ }), + +/***/ 839: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var concatMap = __nccwpck_require__(3279); +var balanced = __nccwpck_require__(62); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + + +/***/ }), + +/***/ 3279: +/***/ ((module) => { + +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; + + +/***/ }), + +/***/ 5887: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), + +/***/ 1881: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = __nccwpck_require__(7147) +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = __nccwpck_require__(7328) + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} + + +/***/ }), + +/***/ 7328: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = __nccwpck_require__(1017); +var isWindows = process.platform === 'win32'; +var fs = __nccwpck_require__(7147); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. 
+ if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. 
+ return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; + + +/***/ }), + +/***/ 8978: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = __nccwpck_require__(7147) +var path = __nccwpck_require__(1017) +var minimatch = __nccwpck_require__(1486) +var isAbsolute = __nccwpck_require__(1237) +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? 
[] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + + +/***/ }), + +/***/ 6691: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. 
+// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = __nccwpck_require__(1881) +var minimatch = __nccwpck_require__(1486) +var Minimatch = minimatch.Minimatch +var inherits = __nccwpck_require__(5635) +var EE = (__nccwpck_require__(2361).EventEmitter) +var path = __nccwpck_require__(1017) +var assert = __nccwpck_require__(9491) +var isAbsolute = __nccwpck_require__(1237) +var globSync = __nccwpck_require__(3892) +var common = __nccwpck_require__(8978) +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = __nccwpck_require__(848) +var util = __nccwpck_require__(3837) +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = __nccwpck_require__(447) + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {<filename>: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} + + +/***/ }), + +/***/ 3892: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = __nccwpck_require__(1881) +var minimatch = __nccwpck_require__(1486) +var Minimatch = minimatch.Minimatch +var Glob = (__nccwpck_require__(6691).Glob) +var util = __nccwpck_require__(3837) +var path = __nccwpck_require__(1017) +var assert = __nccwpck_require__(9491) +var isAbsolute = __nccwpck_require__(1237) +var common = __nccwpck_require__(8978) +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' 
+ else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. 
+ if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + + +/***/ }), + +/***/ 848: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(5448) +var reqs = Object.create(null) +var once = __nccwpck_require__(447) + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} + + +/***/ }), + +/***/ 5635: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +try { + var util = __nccwpck_require__(3837); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __nccwpck_require__(6262); +} + + +/***/ }), + +/***/ 6262: +/***/ ((module) => { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), + +/***/ 4202: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +exports.isPlainObject = isPlainObject; + + +/***/ }), + +/***/ 1486: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = __nccwpck_require__(1017) +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __nccwpck_require__(839) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. 
+var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. 
+ if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. 
+ var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. 
+ f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' 
|| + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + + +/***/ }), + +/***/ 1307: +/***/ ((module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(__nccwpck_require__(2781)); +var http = _interopDefault(__nccwpck_require__(3685)); +var Url = _interopDefault(__nccwpck_require__(7310)); +var https = _interopDefault(__nccwpck_require__(5687)); +var zlib = _interopDefault(__nccwpck_require__(9796)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + 
this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = (__nccwpck_require__(7923).convert); +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports["default"] = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), + +/***/ 447: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var wrappy = __nccwpck_require__(5448) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), + +/***/ 1237: +/***/ ((module) => { + +"use strict"; + + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + + +/***/ }), + +/***/ 5683: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +module.exports = __nccwpck_require__(578); + + +/***/ }), + +/***/ 578: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var net = __nccwpck_require__(1808); +var tls = __nccwpck_require__(4404); +var http = __nccwpck_require__(3685); +var https = __nccwpck_require__(5687); +var events = __nccwpck_require__(2361); +var assert = __nccwpck_require__(9491); +var util = __nccwpck_require__(3837); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 4378: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + + if (typeof process === "object" && "version" in process) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + + return ""; +} + +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 6732: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +"use strict"; + +const os = __nccwpck_require__(2037); + +const homeDirectory = os.homedir(); + +module.exports = pathWithTilde => { + if (typeof pathWithTilde !== 'string') { + throw new TypeError(`Expected a string, got ${typeof pathWithTilde}`); + } + + return homeDirectory ? pathWithTilde.replace(/^~(?=$|\/|\\)/, homeDirectory) : pathWithTilde; +}; + + +/***/ }), + +/***/ 5448: +/***/ ((module) => { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), + +/***/ 7923: +/***/ ((module) => { + +module.exports = eval("require")("encoding"); + + +/***/ }), + +/***/ 9491: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 2361: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 7147: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 3685: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 5687: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1808: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 2037: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 1017: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 2781: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 4404: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 7310: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 3837: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 9796: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + +/***/ }) + +/******/ }); +/************************************************************************/ +/******/ // The module cache +/******/ var __webpack_module_cache__ = {}; +/******/ +/******/ // The require function +/******/ function __nccwpck_require__(moduleId) { +/******/ // Check if module is in cache +/******/ var cachedModule = __webpack_module_cache__[moduleId]; +/******/ if (cachedModule !== undefined) { +/******/ return cachedModule.exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = __webpack_module_cache__[moduleId] = { +/******/ // no module.id needed +/******/ // no module.loaded needed +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete __webpack_module_cache__[moduleId]; +/******/ } +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/************************************************************************/ +/******/ /* webpack/runtime/compat */ +/******/ +/******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; +/******/ +/************************************************************************/ +/******/ +/******/ // startup +/******/ // Load entry module and return 
exports +/******/ // This entry module is referenced by other modules so it can't be inlined +/******/ var __webpack_exports__ = __nccwpck_require__(1769); +/******/ module.exports = __webpack_exports__; +/******/ +/******/ })() +; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/index.js.map b/dist/index.js.map new file mode 100644 index 00000000..8f761905 --- /dev/null +++ b/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACTA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACleA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC15BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://release-action/./lib/Action.js","../webpack://release-action/./lib/Artifact.js","../webpack://release-action/./lib/ArtifactDestroyer.js","../webpack://release-action/./lib/ArtifactGlobber.js","../webpack://release-action/./lib/ArtifactPathValidator.js","../webpack://release-action/./lib/ArtifactUploader.js","../webpack://release
-action/./lib/GithubError.js","../webpack://release-action/./lib/GithubErrorDetail.js","../webpack://release-action/./lib/Globber.js","../webpack://release-action/./lib/Inputs.js","../webpack://release-action/./lib/Outputs.js","../webpack://release-action/./lib/Releases.js","../webpack://release-action/./lib/main.js","../webpack://release-action/./node_modules/@actions/core/lib/command.js","../webpack://release-action/./node_modules/@actions/core/lib/core.js","../webpack://release-action/./node_modules/@actions/core/lib/file-command.js","../webpack://release-action/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://release-action/./node_modules/@actions/core/lib/utils.js","../webpack://release-action/./node_modules/@actions/github/lib/context.js","../webpack://release-action/./node_modules/@actions/github/lib/github.js","../webpack://release-action/./node_modules/@actions/github/lib/internal/utils.js","../webpack://release-action/./node_modules/@actions/github/lib/utils.js","../webpack://release-action/./node_modules/@actions/http-client/auth.js","../webpack://release-action/./node_modules/@actions/http-client/index.js","../webpack://release-action/./node_modules/@actions/http-client/proxy.js","../webpack://release-action/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/core/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/graphql/node_modules/@octokit/request-error/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/graphql/node_modules/@octokit/request/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://release-action/./node_modules/@octokit/request/dist-node/index.js","../webpack://release-action/./node_modules/balanced-match/index.js","../webpack://release-action/./node_modules/before-after-hook/index.js","../webpack://release-action/./node_modules/before-after-hook/lib/add.js","../webpack://release-action/./node_modules/before-after-hook/lib/register.js","../webpack://release-action/./node_modules/before-after-hook/lib/remove.js","../webpack://release-action/./node_modules/brace-expansion/index.js","../webpack://release-action/./node_modules/concat-map/index.js","../webpack://release-action/./node_modules/deprecation/dist-node/index.js","../webpack://release-action/./node_modules/fs.realpath/index.js","../webpack://release-action/./node_modules/fs.realpath/old.js","../webpack://release-action/./node_modules/glob/common.js","../webpack://release-action/./node_modules/glob/glob.js","../webpack://release-action/./node_modules/glob/sync.js","../webpack://release-action/./node_modules/inflight/inflight.js","../webpack://release-action/./node_modules/inherits/inherits.js","../webpack://release-action/./node_modules/inherits/inherits_browser.js","../webpack://release-action/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://release-action/./node_modules/minimatch/minimatch.js","../webpack://release-action/./node_modules/node-fetch/lib/index.js","../webpack://release-action/./node_modules/once/once.js","../webpack://release-action/./node_modu
les/path-is-absolute/index.js","../webpack://release-action/./node_modules/tunnel/index.js","../webpack://release-action/./node_modules/tunnel/lib/tunnel.js","../webpack://release-action/./node_modules/universal-user-agent/dist-node/index.js","../webpack://release-action/./node_modules/untildify/index.js","../webpack://release-action/./node_modules/wrappy/wrappy.js","../webpack://release-action/../../.config/yarn/global/node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://release-action/external node-commonjs \"assert\"","../webpack://release-action/external node-commonjs \"events\"","../webpack://release-action/external node-commonjs \"fs\"","../webpack://release-action/external node-commonjs \"http\"","../webpack://release-action/external node-commonjs \"https\"","../webpack://release-action/external node-commonjs \"net\"","../webpack://release-action/external node-commonjs \"os\"","../webpack://release-action/external node-commonjs \"path\"","../webpack://release-action/external node-commonjs \"stream\"","../webpack://release-action/external node-commonjs \"tls\"","../webpack://release-action/external node-commonjs \"url\"","../webpack://release-action/external node-commonjs \"util\"","../webpack://release-action/external node-commonjs \"zlib\"","../webpack://release-action/webpack/bootstrap","../webpack://release-action/webpack/runtime/compat","../webpack://release-action/webpack/before-startup","../webpack://release-action/webpack/startup","../webpack://release-action/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Action = void 0;\nconst GithubError_1 = require(\"./GithubError\");\nclass Action {\n constructor(inputs, outputs, releases, uploader, artifactDestroyer) {\n this.inputs = inputs;\n this.outputs = outputs;\n this.releases = releases;\n this.uploader = uploader;\n this.artifactDestroyer = artifactDestroyer;\n }\n perform() {\n return __awaiter(this, void 0, void 0, function* () {\n const releaseResponse = yield this.createOrUpdateRelease();\n const releaseData = releaseResponse.data;\n const releaseId = releaseData.id;\n const uploadUrl = releaseData.upload_url;\n if (this.inputs.removeArtifacts) {\n yield this.artifactDestroyer.destroyArtifacts(releaseId);\n }\n const artifacts = this.inputs.artifacts;\n if (artifacts.length > 0) {\n yield this.uploader.uploadArtifacts(artifacts, releaseId, uploadUrl);\n }\n this.outputs.applyReleaseData(releaseData);\n });\n }\n createOrUpdateRelease() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this.inputs.allowUpdates) {\n let getResponse;\n try {\n getResponse = yield this.releases.getByTag(this.inputs.tag);\n }\n catch (error) {\n return yield this.checkForMissingReleaseError(error);\n }\n return yield this.updateRelease(getResponse.data.id);\n }\n else {\n return yield this.createRelease();\n }\n });\n }\n checkForMissingReleaseError(error) {\n return __awaiter(this, void 0, void 0, function* () {\n if (Action.noPublishedRelease(error)) {\n return yield this.updateDraftOrCreateRelease();\n }\n else {\n throw error;\n }\n });\n }\n updateRelease(id) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield this.releases.update(id, this.inputs.tag, this.inputs.updatedReleaseBody, this.inputs.commit, this.inputs.discussionCategory, this.inputs.draft, this.inputs.updatedReleaseName, this.inputs.updatedPrerelease);\n });\n }\n static noPublishedRelease(error) {\n const githubError = new GithubError_1.GithubError(error);\n return githubError.status == 404;\n }\n updateDraftOrCreateRelease() {\n return __awaiter(this, void 0, void 0, function* () {\n const draftReleaseId = yield this.findMatchingDraftReleaseId();\n if (draftReleaseId) {\n return yield this.updateRelease(draftReleaseId);\n }\n else {\n return yield this.createRelease();\n }\n });\n }\n findMatchingDraftReleaseId() {\n return __awaiter(this, void 0, void 0, function* () {\n const tag = this.inputs.tag;\n const response = yield this.releases.listReleases();\n const releases = response.data;\n const draftRelease = releases.find(release => release.draft && release.tag_name == tag);\n return draftRelease === null || draftRelease === void 0 ? 
void 0 : draftRelease.id;\n });\n }\n createRelease() {\n return __awaiter(this, void 0, void 0, function* () {\n return yield this.releases.create(this.inputs.tag, this.inputs.createdReleaseBody, this.inputs.commit, this.inputs.discussionCategory, this.inputs.draft, this.inputs.createdReleaseName, this.inputs.createdPrerelease);\n });\n }\n}\nexports.Action = Action;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Artifact = void 0;\nconst path_1 = require(\"path\");\nconst fs_1 = require(\"fs\");\nclass Artifact {\n constructor(path, contentType = \"raw\") {\n this.path = path;\n this.name = (0, path_1.basename)(path);\n this.contentType = contentType;\n }\n get contentLength() {\n return (0, fs_1.statSync)(this.path).size;\n }\n readFile() {\n return (0, fs_1.readFileSync)(this.path);\n }\n}\nexports.Artifact = Artifact;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GithubArtifactDestroyer = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nclass GithubArtifactDestroyer {\n constructor(releases) {\n this.releases = releases;\n }\n destroyArtifacts(releaseId) {\n return __awaiter(this, void 0, void 0, function* () {\n const releaseAssets = yield this.releases.listArtifactsForRelease(releaseId);\n for (const artifact of releaseAssets) {\n const asset = artifact;\n core.debug(`Deleting existing artifact ${artifact.name}...`);\n yield this.releases.deleteArtifact(asset.id);\n }\n });\n }\n}\nexports.GithubArtifactDestroyer = GithubArtifactDestroyer;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FileArtifactGlobber = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst Globber_1 = require(\"./Globber\");\nconst Artifact_1 = require(\"./Artifact\");\nconst untildify_1 = __importDefault(require(\"untildify\"));\nconst ArtifactPathValidator_1 = require(\"./ArtifactPathValidator\");\nclass FileArtifactGlobber {\n constructor(globber = new Globber_1.FileGlobber()) {\n this.globber = globber;\n }\n globArtifactString(artifact, contentType, errorsFailBuild) {\n const split = /[,\\n]/;\n return artifact.split(split)\n .map(path => path.trimStart())\n .map(path => FileArtifactGlobber.expandPath(path))\n .map(pattern => this.globPattern(pattern, errorsFailBuild))\n .map((globResult) => FileArtifactGlobber.validatePattern(errorsFailBuild, globResult[1], globResult[0]))\n .reduce((accumulated, current) => accumulated.concat(current))\n .map(path => new Artifact_1.Artifact(path, contentType));\n }\n globPattern(pattern, errorsFailBuild) {\n const paths = this.globber.glob(pattern);\n if (paths.length == 0) {\n if (errorsFailBuild) {\n FileArtifactGlobber.throwGlobError(pattern);\n }\n else {\n FileArtifactGlobber.reportGlobWarning(pattern);\n }\n }\n return [pattern, paths];\n }\n static validatePattern(errorsFailBuild, paths, pattern) {\n const validator = new ArtifactPathValidator_1.ArtifactPathValidator(errorsFailBuild, paths, pattern);\n return validator.validate();\n }\n static reportGlobWarning(pattern) {\n core.warning(`Artifact pattern :${pattern} did not match any files`);\n }\n static throwGlobError(pattern) {\n throw Error(`Artifact pattern :${pattern} did not match any files`);\n }\n static expandPath(path) {\n return (0, untildify_1.default)(path);\n }\n}\nexports.FileArtifactGlobber = FileArtifactGlobber;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ArtifactPathValidator = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst fs_1 = require(\"fs\");\nclass ArtifactPathValidator {\n constructor(errorsFailBuild, paths, pattern) {\n this.paths = paths;\n this.pattern = pattern;\n this.errorsFailBuild = errorsFailBuild;\n }\n validate() {\n this.verifyPathsNotEmpty();\n return this.paths.filter((path) => this.verifyNotDirectory(path));\n }\n verifyPathsNotEmpty() {\n if (this.paths.length == 0) {\n const message = `Artifact pattern:${this.pattern} did not match any files`;\n this.reportError(message);\n }\n }\n verifyNotDirectory(path) {\n const isDir = (0, fs_1.statSync)(path).isDirectory();\n if (isDir) {\n const message = `Artifact is a directory:${path}. Directories can not be uploaded to a release.`;\n this.reportError(message);\n }\n return !isDir;\n }\n reportError(message) {\n if (this.errorsFailBuild) {\n throw Error(message);\n }\n else {\n core.warning(message);\n }\n }\n}\nexports.ArtifactPathValidator = ArtifactPathValidator;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GithubArtifactUploader = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nclass GithubArtifactUploader {\n constructor(releases, replacesExistingArtifacts = true, throwsUploadErrors = false) {\n this.releases = releases;\n this.replacesExistingArtifacts = replacesExistingArtifacts;\n this.throwsUploadErrors = throwsUploadErrors;\n }\n uploadArtifacts(artifacts, releaseId, uploadUrl) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this.replacesExistingArtifacts) {\n yield this.deleteUpdatedArtifacts(artifacts, releaseId);\n }\n for (const artifact of artifacts) {\n yield this.uploadArtifact(artifact, releaseId, uploadUrl);\n }\n });\n }\n uploadArtifact(artifact, releaseId, uploadUrl, retry = 3) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n core.debug(`Uploading artifact ${artifact.name}...`);\n yield this.releases.uploadArtifact(uploadUrl, artifact.contentLength, artifact.contentType, artifact.readFile(), artifact.name, releaseId);\n }\n catch (error) {\n if (error.status >= 500 && retry > 0) {\n core.warning(`Failed to upload artifact ${artifact.name}. ${error.message}. Retrying...`);\n yield this.uploadArtifact(artifact, releaseId, uploadUrl, retry - 1);\n }\n else {\n if (this.throwsUploadErrors) {\n throw Error(`Failed to upload artifact ${artifact.name}. ${error.message}.`);\n }\n else {\n core.warning(`Failed to upload artifact ${artifact.name}. ${error.message}.`);\n }\n }\n }\n });\n }\n deleteUpdatedArtifacts(artifacts, releaseId) {\n return __awaiter(this, void 0, void 0, function* () {\n const releaseAssets = yield this.releases.listArtifactsForRelease(releaseId);\n const assetByName = {};\n releaseAssets.forEach(asset => {\n assetByName[asset.name] = asset;\n });\n for (const artifact of artifacts) {\n const asset = assetByName[artifact.name];\n if (asset) {\n core.debug(`Deleting existing artifact ${artifact.name}...`);\n yield this.releases.deleteArtifact(asset.id);\n }\n }\n });\n }\n}\nexports.GithubArtifactUploader = GithubArtifactUploader;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GithubError = void 0;\nconst GithubErrorDetail_1 = require(\"./GithubErrorDetail\");\nclass GithubError {\n constructor(error) {\n this.error = error;\n this.githubErrors = this.generateGithubErrors();\n }\n generateGithubErrors() {\n const errors = this.error.errors;\n if (errors instanceof Array) {\n return errors.map((err) => new GithubErrorDetail_1.GithubErrorDetail(err));\n }\n else {\n return [];\n }\n }\n get status() {\n return this.error.status;\n }\n hasErrorWithCode(code) {\n return this.githubErrors.some((err) => err.code == code);\n }\n toString() {\n const message = this.error.message;\n const errors = this.githubErrors;\n const status = this.status;\n if (errors.length > 0) {\n return `Error ${status}: ${message}\\nErrors:\\n${this.errorBulletedList(errors)}`;\n }\n else {\n return `Error ${status}: ${message}`;\n }\n }\n errorBulletedList(errors) {\n return errors.map((err) => `- ${err}`).join(\"\\n\");\n }\n}\nexports.GithubError = GithubError;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GithubErrorDetail = void 0;\nclass GithubErrorDetail {\n constructor(error) {\n this.error = error;\n }\n get 
code() {\n return this.error.code;\n }\n toString() {\n const code = this.error.code;\n switch (code) {\n case 'missing':\n return this.missingResourceMessage();\n case 'missing_field':\n return this.missingFieldMessage();\n case 'invalid':\n return this.invalidFieldMessage();\n case 'already_exists':\n return this.resourceAlreadyExists();\n default:\n return this.customErrorMessage();\n }\n }\n customErrorMessage() {\n const message = this.error.message;\n const documentation = this.error.documentation_url;\n let documentationMessage;\n if (documentation) {\n documentationMessage = `\\nPlease see ${documentation}.`;\n }\n else {\n documentationMessage = \"\";\n }\n return `${message}${documentationMessage}`;\n }\n invalidFieldMessage() {\n const resource = this.error.resource;\n const field = this.error.field;\n return `The ${field} field on ${resource} is an invalid format.`;\n }\n missingResourceMessage() {\n const resource = this.error.resource;\n return `${resource} does not exist.`;\n }\n missingFieldMessage() {\n const resource = this.error.resource;\n const field = this.error.field;\n return `The ${field} field on ${resource} is missing.`;\n }\n resourceAlreadyExists() {\n const resource = this.error.resource;\n return `${resource} already exists.`;\n }\n}\nexports.GithubErrorDetail = GithubErrorDetail;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FileGlobber = void 0;\nconst glob_1 = require(\"glob\");\nclass FileGlobber {\n glob(pattern) {\n return new glob_1.GlobSync(pattern, { mark: true }).found;\n }\n}\nexports.FileGlobber = FileGlobber;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CoreInputs = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nconst fs_1 = require(\"fs\");\nclass CoreInputs {\n constructor(artifactGlobber, context) {\n this.artifactGlobber = artifactGlobber;\n this.context = context;\n }\n get allowUpdates() {\n const allow = core.getInput('allowUpdates');\n return allow == 'true';\n }\n get artifacts() {\n let artifacts = core.getInput('artifacts');\n if (!artifacts) {\n artifacts = core.getInput('artifact');\n }\n if (artifacts) {\n let contentType = core.getInput('artifactContentType');\n if (!contentType) {\n contentType = 'raw';\n }\n return this.artifactGlobber\n .globArtifactString(artifacts, contentType, this.artifactErrorsFailBuild);\n }\n return [];\n }\n get artifactErrorsFailBuild() {\n const allow = core.getInput('artifactErrorsFailBuild');\n return allow == 'true';\n }\n get createdReleaseBody() {\n if (CoreInputs.omitBody)\n return undefined;\n return this.body;\n }\n static get omitBody() {\n return core.getInput('omitBody') == 'true';\n }\n get body() {\n const body = core.getInput('body');\n if (body) {\n return body;\n }\n const bodyFile = core.getInput('bodyFile');\n if (bodyFile) {\n return this.stringFromFile(bodyFile);\n }\n return '';\n }\n get commit() {\n return core.getInput('commit');\n }\n get createdReleaseName() {\n if (CoreInputs.omitName)\n return undefined;\n return this.name;\n }\n get discussionCategory() {\n const category = core.getInput('discussionCategory');\n if (category) {\n return category;\n }\n return undefined;\n }\n static get omitName() {\n return core.getInput('omitName') == 'true';\n }\n get name() {\n const name = core.getInput('name');\n if (name) {\n return name;\n }\n return this.tag;\n }\n get draft() {\n const draft = core.getInput('draft');\n return draft == 'true';\n }\n get owner() {\n let owner = core.getInput('owner');\n if (owner) {\n return owner;\n }\n return this.context.repo.owner;\n }\n get createdPrerelease() {\n const preRelease = core.getInput('prerelease');\n return preRelease == 'true';\n }\n static get omitPrereleaseDuringUpdate() {\n return core.getInput('omitPrereleaseDuringUpdate') == 'true';\n }\n get updatedPrerelease() {\n if (CoreInputs.omitPrereleaseDuringUpdate)\n return undefined;\n return this.createdPrerelease;\n }\n get removeArtifacts() {\n const removes = core.getInput('removeArtifacts');\n return removes == 'true';\n }\n get replacesArtifacts() {\n const replaces = core.getInput('replacesArtifacts');\n return replaces == 'true';\n }\n get repo() {\n let repo = core.getInput('repo');\n if (repo) {\n return repo;\n }\n return this.context.repo.repo;\n }\n get tag() {\n const tag = core.getInput('tag');\n if (tag) {\n return tag;\n }\n const ref = this.context.ref;\n const tagPath = \"refs/tags/\";\n if (ref && ref.startsWith(tagPath)) {\n return ref.substr(tagPath.length, ref.length);\n }\n throw Error(\"No tag found in ref or input!\");\n }\n get token() {\n return core.getInput('token', { required: true });\n }\n 
get updatedReleaseBody() {\n if (CoreInputs.omitBody || CoreInputs.omitBodyDuringUpdate)\n return undefined;\n return this.body;\n }\n static get omitBodyDuringUpdate() {\n return core.getInput('omitBodyDuringUpdate') == 'true';\n }\n get updatedReleaseName() {\n if (CoreInputs.omitName || CoreInputs.omitNameDuringUpdate)\n return undefined;\n return this.name;\n }\n static get omitNameDuringUpdate() {\n return core.getInput('omitNameDuringUpdate') == 'true';\n }\n stringFromFile(path) {\n return (0, fs_1.readFileSync)(path, 'utf-8');\n }\n}\nexports.CoreInputs = CoreInputs;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CoreOutputs = void 0;\nconst core = __importStar(require(\"@actions/core\"));\nclass CoreOutputs {\n applyReleaseData(releaseData) {\n core.setOutput('id', releaseData.id);\n core.setOutput('html_url', releaseData.html_url);\n core.setOutput('upload_url', releaseData.upload_url);\n }\n}\nexports.CoreOutputs = CoreOutputs;\n","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GithubReleases = void 0;\nclass GithubReleases {\n constructor(inputs, git) {\n this.inputs = inputs;\n this.git = git;\n }\n create(tag, body, commitHash, discussionCategory, draft, name, prerelease) {\n return __awaiter(this, void 0, void 0, function* () {\n // noinspection TypeScriptValidateJSTypes\n return this.git.rest.repos.createRelease({\n body: body,\n name: name,\n discussion_category_name: discussionCategory,\n draft: draft,\n owner: this.inputs.owner,\n prerelease: prerelease,\n repo: this.inputs.repo,\n target_commitish: commitHash,\n tag_name: tag\n });\n });\n }\n deleteArtifact(assetId) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.git.rest.repos.deleteReleaseAsset({\n asset_id: assetId,\n owner: this.inputs.owner,\n repo: this.inputs.repo\n });\n });\n }\n getByTag(tag) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.git.rest.repos.getReleaseByTag({\n owner: this.inputs.owner,\n repo: this.inputs.repo,\n tag: tag\n });\n });\n }\n listArtifactsForRelease(releaseId) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.git.paginate(this.git.rest.repos.listReleaseAssets, {\n owner: this.inputs.owner,\n release_id: releaseId,\n repo: this.inputs.repo\n });\n });\n }\n listReleases() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.git.rest.repos.listReleases({\n owner: this.inputs.owner,\n repo: this.inputs.repo\n });\n });\n }\n update(id, tag, body, commitHash, discussionCategory, draft, name, prerelease) {\n return __awaiter(this, void 0, void 0, function* () {\n // noinspection TypeScriptValidateJSTypes\n return this.git.rest.repos.updateRelease({\n release_id: id,\n body: body,\n name: name,\n discussion_category_name: discussionCategory,\n draft: draft,\n owner: this.inputs.owner,\n prerelease: prerelease,\n repo: this.inputs.repo,\n target_commitish: commitHash,\n tag_name: tag\n });\n });\n }\n uploadArtifact(assetUrl, contentLength, contentType, file, name, releaseId) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.git.rest.repos.uploadReleaseAsset({\n url: assetUrl,\n headers: {\n \"content-length\": contentLength,\n \"content-type\": contentType\n },\n data: file,\n name: name,\n owner: this.inputs.owner,\n release_id: releaseId,\n repo: this.inputs.repo\n });\n });\n }\n}\nexports.GithubReleases = GithubReleases;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst github = __importStar(require(\"@actions/github\"));\nconst core = __importStar(require(\"@actions/core\"));\nconst Inputs_1 = require(\"./Inputs\");\nconst Releases_1 = require(\"./Releases\");\nconst Action_1 = require(\"./Action\");\nconst ArtifactUploader_1 = require(\"./ArtifactUploader\");\nconst ArtifactGlobber_1 = require(\"./ArtifactGlobber\");\nconst GithubError_1 = require(\"./GithubError\");\nconst Outputs_1 = require(\"./Outputs\");\nconst ArtifactDestroyer_1 = require(\"./ArtifactDestroyer\");\nfunction run() {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n const action = createAction();\n yield action.perform();\n }\n catch (error) {\n const githubError = new GithubError_1.GithubError(error);\n core.setFailed(githubError.toString());\n }\n });\n}\nfunction createAction() {\n const token = core.getInput('token');\n const context = github.context;\n const git = github.getOctokit(token);\n const globber = new ArtifactGlobber_1.FileArtifactGlobber();\n const inputs = new Inputs_1.CoreInputs(globber, context);\n const outputs = new Outputs_1.CoreOutputs();\n const releases = new Releases_1.GithubReleases(inputs, git);\n const uploader = new ArtifactUploader_1.GithubArtifactUploader(releases, inputs.replacesArtifacts, inputs.artifactErrorsFailBuild);\n const artifactDestroyer = new ArtifactDestroyer_1.GithubArtifactDestroyer(releases);\n return new Action_1.Action(inputs, outputs, releases, uploader, artifactDestroyer);\n}\nrun();\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n const delimiter = '_GitHubActionsFileCommandDelimeter_';\n const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;\n file_command_1.issueCommand('ENV', commandValue);\n }\n else {\n command_1.issueCommand('set-env', { name }, convertedVal);\n }\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n return inputs;\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, value);\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n command_1.issueCommand('save-state', { name }, value);\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issueCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\nfunction issueCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueCommand = issueCommand;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Context = void 0;\nconst fs_1 = require(\"fs\");\nconst os_1 = require(\"os\");\nclass Context {\n /**\n * Hydrate the context from the environment\n */\n constructor() {\n var _a, _b, _c;\n this.payload = {};\n if (process.env.GITHUB_EVENT_PATH) {\n if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {\n this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));\n }\n else {\n const path = process.env.GITHUB_EVENT_PATH;\n process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);\n }\n }\n this.eventName = process.env.GITHUB_EVENT_NAME;\n this.sha = process.env.GITHUB_SHA;\n this.ref = process.env.GITHUB_REF;\n this.workflow = process.env.GITHUB_WORKFLOW;\n this.action = process.env.GITHUB_ACTION;\n this.actor = process.env.GITHUB_ACTOR;\n this.job = process.env.GITHUB_JOB;\n this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);\n this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);\n this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;\n this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;\n this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? 
_c : `https://api.github.com/graphql`;\n }\n get issue() {\n const payload = this.payload;\n return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });\n }\n get repo() {\n if (process.env.GITHUB_REPOSITORY) {\n const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');\n return { owner, repo };\n }\n if (this.payload.repository) {\n return {\n owner: this.payload.repository.owner.login,\n repo: this.payload.repository.name\n };\n }\n throw new Error(\"context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'\");\n }\n}\nexports.Context = Context;\n//# sourceMappingURL=context.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokit = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst utils_1 = require(\"./utils\");\nexports.context = new Context.Context();\n/**\n * Returns a hydrated octokit ready to use for GitHub Actions\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokit(token, options) {\n return new utils_1.GitHub(utils_1.getOctokitOptions(token, options));\n}\nexports.getOctokit = getOctokit;\n//# sourceMappingURL=github.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;\nconst httpClient = __importStar(require(\"@actions/http-client\"));\nfunction getAuthString(token, options) {\n if (!token && !options.auth) {\n throw new Error('Parameter token or opts.auth is required');\n }\n else if (token && options.auth) {\n throw new Error('Parameters token and opts.auth may not both be specified');\n }\n return typeof options.auth === 'string' ? 
options.auth : `token ${token}`;\n}\nexports.getAuthString = getAuthString;\nfunction getProxyAgent(destinationUrl) {\n const hc = new httpClient.HttpClient();\n return hc.getAgent(destinationUrl);\n}\nexports.getProxyAgent = getProxyAgent;\nfunction getApiBaseUrl() {\n return process.env['GITHUB_API_URL'] || 'https://api.github.com';\n}\nexports.getApiBaseUrl = getApiBaseUrl;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getOctokitOptions = exports.GitHub = exports.context = void 0;\nconst Context = __importStar(require(\"./context\"));\nconst Utils = __importStar(require(\"./internal/utils\"));\n// octokit + plugins\nconst core_1 = require(\"@octokit/core\");\nconst plugin_rest_endpoint_methods_1 = require(\"@octokit/plugin-rest-endpoint-methods\");\nconst plugin_paginate_rest_1 = require(\"@octokit/plugin-paginate-rest\");\nexports.context = new Context.Context();\nconst baseUrl = Utils.getApiBaseUrl();\nconst defaults = {\n baseUrl,\n request: {\n agent: Utils.getProxyAgent(baseUrl)\n }\n};\nexports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults);\n/**\n * Convience function to correctly format Octokit Options to pass into the constructor.\n *\n * @param token the repo PAT or GITHUB_TOKEN\n * @param options other options to set\n */\nfunction getOctokitOptions(token, options) {\n const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller\n // Auth\n const auth = Utils.getAuthString(token, opts);\n if (auth) {\n opts.auth = auth;\n }\n return opts;\n}\nexports.getOctokitOptions = getOctokitOptions;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' +\n Buffer.from(this.username + ':' + this.password).toString('base64');\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] = 'Bearer ' + this.token;\n }\n // This handler cannot handle 401\n 
canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n options.headers['Authorization'] =\n 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');\n }\n // This handler cannot handle 401\n canHandleAuthentication(response) {\n return false;\n }\n handleAuthentication(httpClient, requestInfo, objs) {\n return null;\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst http = require(\"http\");\nconst https = require(\"https\");\nconst pm = require(\"./proxy\");\nlet tunnel;\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n let proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return new Promise(async (resolve, reject) => {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n let parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n }\n get(requestUrl, additionalHeaders) {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n }\n del(requestUrl, additionalHeaders) {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n }\n post(requestUrl, data, additionalHeaders) {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n }\n patch(requestUrl, data, additionalHeaders) {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n }\n put(requestUrl, data, additionalHeaders) {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n }\n head(requestUrl, additionalHeaders) {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n 
}\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n async getJson(requestUrl, additionalHeaders = {}) {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n let res = await this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async postJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async putJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n async patchJson(requestUrl, obj, additionalHeaders = {}) {\n let data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n let res = await this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n async request(verb, requestUrl, data, headers) {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n let parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n while (numTries < maxTries) {\n response = await this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (let i = 0; i < this.handlers.length; i++) {\n if (this.handlers[i].canHandleAuthentication(response)) {\n authenticationHandler = this.handlers[i];\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n let parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol == 'https:' &&\n parsedUrl.protocol != parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n await response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (let header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = await this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n await response.readBody();\n await this._performExponentialBackoff(numTries);\n }\n }\n return response;\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return new Promise((resolve, reject) => {\n let callbackForResult = function (err, res) {\n if (err) {\n reject(err);\n }\n resolve(res);\n };\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n let socket;\n if (typeof data === 'string') {\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n let handleResult = (err, res) => {\n if (!callbackCalled) {\n callbackCalled = true;\n onResult(err, res);\n }\n };\n let req = info.httpModule.request(info.options, (msg) => {\n let res = new HttpClientResponse(msg);\n handleResult(null, res);\n });\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout 
eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error('Request timeout: ' + info.options.path), null);\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err, null);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n let parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n this.handlers.forEach(handler => {\n handler.prepareRequest(info.options);\n });\n }\n return info;\n }\n _mergeHeaders(headers) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n let proxyUrl = pm.getProxyUrl(parsedUrl);\n let useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (!!agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (!!this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n if (useProxy) {\n // If using proxy, need tunnel\n if (!tunnel) {\n tunnel = require('tunnel');\n }\n const agentOptions = {\n maxSockets: maxSockets,\n keepAlive: this._keepAlive,\n proxy: {\n ...((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`\n }),\n host: proxyUrl.hostname,\n port: proxyUrl.port\n }\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if 
(usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n }\n static dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n let a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n async _processResponse(res, options) {\n return new Promise(async (resolve, reject) => {\n const statusCode = res.message.statusCode;\n const response = {\n statusCode: statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode == HttpCodes.NotFound) {\n resolve(response);\n }\n let obj;\n let contents;\n // get the result from the body\n try {\n contents = await res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = 'Failed request: (' + statusCode + ')';\n }\n let err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n });\n }\n}\nexports.HttpClient = HttpClient;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nfunction getProxyUrl(reqUrl) {\n let usingSsl = reqUrl.protocol === 'https:';\n let proxyUrl;\n if (checkBypass(reqUrl)) {\n return proxyUrl;\n }\n let proxyVar;\n if (usingSsl) {\n proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n if (proxyVar) {\n proxyUrl = new URL(proxyVar);\n }\n return proxyUrl;\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n let noProxy = 
process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n let upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (let upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperReqHosts.some(x => x === upperNoProxyItem)) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nasync function auth(token) {\n const tokenType = token.split(/\\./).length === 3 ? \"app\" : /^v\\d+\\./.test(token) ? \"installation\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nfunction _objectWithoutPropertiesLoose(source, excluded) {\n if (source == null) return {};\n var target = {};\n var sourceKeys = Object.keys(source);\n var key, i;\n\n for (i = 0; i < sourceKeys.length; i++) {\n key = sourceKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n target[key] = source[key];\n }\n\n return target;\n}\n\nfunction _objectWithoutProperties(source, excluded) {\n if (source == null) return {};\n\n var target = _objectWithoutPropertiesLoose(source, excluded);\n\n var key, i;\n\n if (Object.getOwnPropertySymbols) {\n var sourceSymbolKeys = Object.getOwnPropertySymbols(source);\n\n for (i = 0; i < sourceSymbolKeys.length; i++) {\n key = sourceSymbolKeys[i];\n if (excluded.indexOf(key) >= 0) continue;\n if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;\n target[key] = source[key];\n }\n }\n\n return target;\n}\n\nconst VERSION = \"3.5.1\";\n\nconst _excluded = [\"authStrategy\"];\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: 
request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy\n } = options,\n otherOptions = _objectWithoutProperties(options, _excluded);\n\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
{\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? {\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n } // lowercase header names before merging with defaults to avoid duplicates\n\n\n options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging\n\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten\n\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? 
\"&\" : \"?\";\n const names = Object.keys(parameters);\n\n if (names.length === 0) {\n return url;\n }\n\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\n\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\n\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n\n if (!matches) {\n return [];\n }\n\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n\n return part;\n }).join(\"\");\n}\n\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\n\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? 
encodeReserved(value) : encodeUnreserved(value);\n\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\n\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\n\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\n\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n\n return result;\n}\n\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\n\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n\n if (operator && operator !== \"+\") {\n var separator = \",\";\n\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n\n return (values.length !== 0 ? 
operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible\n\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]); // extract variable names from URL to calculate remaining variables later\n\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n } // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n\n\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n } else {\n headers[\"content-length\"] = 0;\n }\n }\n } // default content-type for JSON if body is set\n\n\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n\n\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n } // Only return body/request keys if present\n\n\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? 
{\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"6.0.11\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\n\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"4.6.0\";\n\nclass GraphqlError extends Error {\n constructor(request, response) {\n const message = response.data.errors[0].message;\n super(message);\n Object.assign(this, response.data);\n Object.assign(this, {\n headers: response.headers\n });\n this.name = \"GraphqlError\";\n this.request = request; // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (typeof query === \"string\" && options && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n\n const parsedOptions = typeof query === \"string\" ? 
Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n\n if (!result.variables) {\n result.variables = {};\n }\n\n result.variables[key] = parsedOptions[key];\n return result;\n }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n\n throw new GraphqlError(requestOptions, {\n headers,\n data: response.data\n });\n }\n\n return response.data.data;\n });\n}\n\nfunction withDefaults(request$1, newDefaults) {\n const newRequest = request$1.defaults(newDefaults);\n\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: request.request.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnce = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n Object.defineProperty(this, \"code\", {\n get() {\n logOnce(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n this.headers = options.headers || {}; // redact request credentials without mutating original request options\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.4.14\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, requestOptions.request)).then(response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n headers,\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n return response.text().then(message => {\n const error = new requestError.RequestError(message, status, {\n headers,\n request: requestOptions\n });\n\n try {\n let responseBody = JSON.parse(error.message);\n Object.assign(error, responseBody);\n let errors = responseBody.errors; // Assumption `errors` would always be in Array format\n\n error.message = error.message + \": \" + errors.map(JSON.stringify).join(\", \");\n } catch (e) {// ignore, see octokit/rest.js#684\n }\n\n throw error;\n });\n }\n\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) {\n throw error;\n }\n\n throw new requestError.RequestError(error.message, 500, {\n headers,\n request: requestOptions\n });\n });\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n 
}\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"2.13.6\";\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return _objectSpread2(_objectSpread2({}, response), {}, {\n data: []\n });\n }\n\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\n\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n\n let earlyExit = false;\n\n function done() {\n earlyExit = true;\n }\n\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n\n if (earlyExit) {\n return results;\n }\n\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/installations\", \"GET /applications/grants\", \"GET /authorizations\", \"GET /enterprises/{enterprise}/actions/permissions/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations\", \"GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners\", \"GET /enterprises/{enterprise}/actions/runners\", \"GET /enterprises/{enterprise}/actions/runners/downloads\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/runners/downloads\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/credential-authorizations\", \"GET /orgs/{org}/events\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/team-sync/groups\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runners/downloads\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET 
/repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\", \"GET /repos/{owner}/{repo}/hooks\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /scim/v2/enterprises/{enterprise}/Groups\", \"GET /scim/v2/enterprises/{enterprise}/Users\", \"GET /scim/v2/organizations/{org}/Users\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET 
/teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/team-sync/group-mappings\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET /users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction ownKeys(object, enumerableOnly) {\n var keys = Object.keys(object);\n\n if (Object.getOwnPropertySymbols) {\n var symbols = Object.getOwnPropertySymbols(object);\n\n if (enumerableOnly) {\n symbols = symbols.filter(function (sym) {\n return Object.getOwnPropertyDescriptor(object, sym).enumerable;\n });\n }\n\n keys.push.apply(keys, symbols);\n }\n\n return keys;\n}\n\nfunction _objectSpread2(target) {\n for (var i = 1; i < arguments.length; i++) {\n var source = arguments[i] != null ? 
arguments[i] : {};\n\n if (i % 2) {\n ownKeys(Object(source), true).forEach(function (key) {\n _defineProperty(target, key, source[key]);\n });\n } else if (Object.getOwnPropertyDescriptors) {\n Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));\n } else {\n ownKeys(Object(source)).forEach(function (key) {\n Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));\n });\n }\n }\n\n return target;\n}\n\nfunction _defineProperty(obj, key, value) {\n if (key in obj) {\n Object.defineProperty(obj, key, {\n value: value,\n enumerable: true,\n configurable: true,\n writable: true\n });\n } else {\n obj[key] = value;\n }\n\n return obj;\n}\n\nconst Endpoints = {\n actions: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n 
setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createContentAttachment: [\"POST /content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createContentAttachmentForRepo: [\"POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments\", {\n mediaType: {\n previews: [\"corsair\"]\n }\n }],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n 
getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: [\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForRepo: [\"GET 
/repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getConductCode: [\"GET /codes_of_conduct/{key}\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }],\n getForRepo: [\"GET /repos/{owner}/{repo}/community/code_of_conduct\", {\n mediaType: {\n previews: [\"scarlet-witch\"]\n }\n }]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n disableSelectedOrganizationGithubActionsEnterprise: [\"DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n getAllowedActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n getGithubActionsPermissionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions\"],\n listSelectedOrganizationsEnabledGithubActionsEnterprise: [\"GET /enterprises/{enterprise}/actions/permissions/organizations\"],\n setAllowedActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/selected-actions\"],\n setGithubActionsPermissionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions\"],\n setSelectedOrganizationsEnabledGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET /gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n 
updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", {\n mediaType: {\n previews: [\"mockingbird\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForAuthenticatedUser: [\"GET /user/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/migrations\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\", {\n mediaType: {\n previews: [\"wyandotte\"]\n }\n }],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n 
checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT /orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET 
/users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createCard: [\"POST /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createColumn: [\"POST /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForAuthenticatedUser: [\"POST /user/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForOrg: [\"POST /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n delete: [\"DELETE /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n get: [\"GET /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getCard: [\"GET /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getColumn: [\"GET /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCards: [\"GET /projects/columns/{column_id}/cards\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listColumns: [\"GET /projects/{project_id}/columns\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForOrg: [\"GET /orgs/{org}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listForUser: [\"GET /users/{username}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\", {\n mediaType: {\n previews: 
[\"inertia\"]\n }\n }],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n update: [\"PATCH /projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n updateColumn: [\"PATCH /projects/columns/{column_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\", {\n mediaType: {\n previews: [\"lydian\"]\n }\n }],\n updateReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForRelease: [\"POST 
/repos/{owner}/{repo}/releases/{release_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n deleteLegacy: [\"DELETE /reactions/{reaction_id}\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }, {\n deprecated: \"octokit.rest.reactions.deleteLegacy() is deprecated, see https://docs.github.com/rest/reference/reactions/#delete-a-reaction-legacy\"\n }],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", {\n mediaType: {\n previews: [\"squirrel-girl\"]\n }\n }]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n 
mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\", {\n mediaType: {\n previews: [\"baptiste\"]\n }\n }],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\", {\n mediaType: {\n previews: [\"switcheroo\"]\n }\n }],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n 
downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\", {\n mediaType: {\n previews: [\"london\"]\n }\n }],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\", {\n mediaType: {\n previews: [\"dorian\"]\n }\n }],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\", {\n mediaType: {\n previews: [\"zzzax\"]\n }\n }],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET /repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET 
/repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", {\n mediaType: {\n previews: [\"groot\"]\n }\n }],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST 
/repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET /search/commits\", {\n mediaType: {\n previews: [\"cloak\"]\n }\n }],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\", {\n mediaType: {\n previews: [\"mercy\"]\n }\n }],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n checkPermissionsForRepoInOrg: [\"GET 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\", {\n mediaType: {\n previews: [\"inertia\"]\n }\n }],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n 
listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n listPublicEmailsForAuthenticated: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"5.3.7\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n\n const scopeMethods = newMethods[scope];\n\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n\n return newMethods;\n}\n\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`\n\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`);\n\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n\n delete options[name];\n }\n }\n\n return requestWithDefaults(options);\n } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n\n\n return requestWithDefaults(...args);\n }\n\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return _objectSpread2(_objectSpread2({}, api), {}, {\n rest: api\n });\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\n\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n } // redact request credentials without mutating original request options\n\n\n const requestCopy = Object.assign({}, options.request);\n\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n\n requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\") // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy; // deprecations\n\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n\n });\n }\n\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && 
(typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"5.6.0\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n }, // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n\n if (status === 204 || status === 205) {\n return;\n } // GitHub API returns 200 for HEAD requests\n\n\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\n\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n\n return getBufferResponse(response);\n}\n\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data; // istanbul ignore else - just in case\n\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n\n return data.message;\n } // istanbul ignore next - 
just in case\n\n\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var register = require('./lib/register')\nvar addHook = require('./lib/add')\nvar removeHook = require('./lib/remove')\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind\nvar bindable = bind.bind(bind)\n\nfunction bindApi (hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])\n hook.api = { remove: removeHookRef }\n hook.remove = removeHookRef\n\n ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind]\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)\n })\n}\n\nfunction HookSingular () {\n var singularHookName = 'h'\n var singularHookState = {\n registry: {}\n }\n var singularHook = register.bind(null, singularHookState, singularHookName)\n bindApi(singularHook, singularHookState, singularHookName)\n return singularHook\n}\n\nfunction HookCollection () {\n var state = {\n registry: {}\n }\n\n var hook = register.bind(null, state)\n bindApi(hook, state)\n\n return hook\n}\n\nvar collectionHookDeprecationMessageDisplayed = false\nfunction Hook () {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn('[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4')\n collectionHookDeprecationMessageDisplayed = true\n }\n return HookCollection()\n}\n\nHook.Singular = HookSingular.bind()\nHook.Collection = HookCollection.bind()\n\nmodule.exports = Hook\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook\nmodule.exports.Singular = Hook.Singular\nmodule.exports.Collection = Hook.Collection\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","var concatMap = require('concat-map');\nvar balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction identity(e) {\n return e;\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m || /\\$$/.test(m.pre)) return [str];\n\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = concatMap(n, function(el) { return expand(el, false) });\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n\n return expansions;\n}\n\n","module.exports = function (xs, fn) {\n var res = [];\n for (var i = 0; i < xs.length; i++) {\n var x = fn(xs[i], i);\n if (isArray(x)) res.push.apply(res, x);\n else res.push(x);\n }\n return res;\n};\n\nvar isArray = Array.isArray || function (xs) {\n return Object.prototype.toString.call(xs) === '[object Array]';\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","module.exports = realpath\nrealpath.realpath = realpath\nrealpath.sync = realpathSync\nrealpath.realpathSync = realpathSync\nrealpath.monkeypatch = monkeypatch\nrealpath.unmonkeypatch = unmonkeypatch\n\nvar fs = require('fs')\nvar origRealpath = fs.realpath\nvar origRealpathSync = fs.realpathSync\n\nvar version = process.version\nvar ok = /^v[0-5]\\./.test(version)\nvar old = require('./old.js')\n\nfunction newError (er) {\n return er && er.syscall === 'realpath' && (\n er.code === 'ELOOP' ||\n er.code === 'ENOMEM' ||\n er.code === 'ENAMETOOLONG'\n )\n}\n\nfunction realpath (p, cache, cb) {\n if (ok) {\n return origRealpath(p, cache, cb)\n }\n\n if (typeof cache === 'function') {\n cb = cache\n cache = null\n }\n origRealpath(p, cache, function (er, result) {\n if (newError(er)) {\n old.realpath(p, cache, cb)\n } else {\n cb(er, result)\n }\n })\n}\n\nfunction realpathSync (p, cache) {\n if (ok) {\n return origRealpathSync(p, cache)\n }\n\n try {\n return origRealpathSync(p, cache)\n } catch (er) {\n if (newError(er)) {\n return old.realpathSync(p, cache)\n } else {\n throw er\n }\n }\n}\n\nfunction monkeypatch () {\n fs.realpath = realpath\n fs.realpathSync = realpathSync\n}\n\nfunction unmonkeypatch () {\n fs.realpath = origRealpath\n fs.realpathSync = origRealpathSync\n}\n","// Copyright Joyent, Inc. 
and other Node contributors.\n//\n// Permission is hereby granted, free of charge, to any person obtaining a\n// copy of this software and associated documentation files (the\n// \"Software\"), to deal in the Software without restriction, including\n// without limitation the rights to use, copy, modify, merge, publish,\n// distribute, sublicense, and/or sell copies of the Software, and to permit\n// persons to whom the Software is furnished to do so, subject to the\n// following conditions:\n//\n// The above copyright notice and this permission notice shall be included\n// in all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS\n// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF\n// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN\n// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,\n// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR\n// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE\n// USE OR OTHER DEALINGS IN THE SOFTWARE.\n\nvar pathModule = require('path');\nvar isWindows = process.platform === 'win32';\nvar fs = require('fs');\n\n// JavaScript implementation of realpath, ported from node pre-v6\n\nvar DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);\n\nfunction rethrow() {\n // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and\n // is fairly slow to generate.\n var callback;\n if (DEBUG) {\n var backtrace = new Error;\n callback = debugCallback;\n } else\n callback = missingCallback;\n\n return callback;\n\n function debugCallback(err) {\n if (err) {\n backtrace.message = err.message;\n err = backtrace;\n missingCallback(err);\n }\n }\n\n function missingCallback(err) {\n if (err) {\n if (process.throwDeprecation)\n throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs\n else if (!process.noDeprecation) {\n var msg = 'fs: missing callback ' + (err.stack || err.message);\n if (process.traceDeprecation)\n console.trace(msg);\n else\n console.error(msg);\n }\n }\n }\n}\n\nfunction maybeCallback(cb) {\n return typeof cb === 'function' ? cb : rethrow();\n}\n\nvar normalize = pathModule.normalize;\n\n// Regexp that finds the next partion of a (partial) path\n// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']\nif (isWindows) {\n var nextPartRe = /(.*?)(?:[\\/\\\\]+|$)/g;\n} else {\n var nextPartRe = /(.*?)(?:[\\/]+|$)/g;\n}\n\n// Regex to find the device root, including trailing slash. E.g. 'c:\\\\'.\nif (isWindows) {\n var splitRootRe = /^(?:[a-zA-Z]:|[\\\\\\/]{2}[^\\\\\\/]+[\\\\\\/][^\\\\\\/]+)?[\\\\\\/]*/;\n} else {\n var splitRootRe = /^[\\/]*/;\n}\n\nexports.realpathSync = function realpathSync(p, cache) {\n // make p is absolute\n p = pathModule.resolve(p);\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {\n return cache[p];\n }\n\n var original = p,\n seenLinks = {},\n knownHard = {};\n\n // current character position in p\n var pos;\n // the partial path so far, including a trailing slash if any\n var current;\n // the partial path without a trailing slash (except when pointing at a root)\n var base;\n // the partial path scanned in the previous round, with slash\n var previous;\n\n start();\n\n function start() {\n // Skip over roots\n var m = splitRootRe.exec(p);\n pos = m[0].length;\n current = m[0];\n base = m[0];\n previous = '';\n\n // On windows, check that the root exists. 
On unix there is no need.\n if (isWindows && !knownHard[base]) {\n fs.lstatSync(base);\n knownHard[base] = true;\n }\n }\n\n // walk down the path, swapping out linked pathparts for their real\n // values\n // NB: p.length changes.\n while (pos < p.length) {\n // find the next part\n nextPartRe.lastIndex = pos;\n var result = nextPartRe.exec(p);\n previous = current;\n current += result[0];\n base = previous + result[1];\n pos = nextPartRe.lastIndex;\n\n // continue if not a symlink\n if (knownHard[base] || (cache && cache[base] === base)) {\n continue;\n }\n\n var resolvedLink;\n if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {\n // some known symbolic link. no need to stat again.\n resolvedLink = cache[base];\n } else {\n var stat = fs.lstatSync(base);\n if (!stat.isSymbolicLink()) {\n knownHard[base] = true;\n if (cache) cache[base] = base;\n continue;\n }\n\n // read the link if it wasn't read before\n // dev/ino always return 0 on windows, so skip the check.\n var linkTarget = null;\n if (!isWindows) {\n var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);\n if (seenLinks.hasOwnProperty(id)) {\n linkTarget = seenLinks[id];\n }\n }\n if (linkTarget === null) {\n fs.statSync(base);\n linkTarget = fs.readlinkSync(base);\n }\n resolvedLink = pathModule.resolve(previous, linkTarget);\n // track this, if given a cache.\n if (cache) cache[base] = resolvedLink;\n if (!isWindows) seenLinks[id] = linkTarget;\n }\n\n // resolve the link, then start over\n p = pathModule.resolve(resolvedLink, p.slice(pos));\n start();\n }\n\n if (cache) cache[original] = p;\n\n return p;\n};\n\n\nexports.realpath = function realpath(p, cache, cb) {\n if (typeof cb !== 'function') {\n cb = maybeCallback(cache);\n cache = null;\n }\n\n // make p is absolute\n p = pathModule.resolve(p);\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {\n return process.nextTick(cb.bind(null, null, cache[p]));\n }\n\n var original = p,\n seenLinks = {},\n knownHard = {};\n\n // current character position in p\n var pos;\n // the partial path so far, including a trailing slash if any\n var current;\n // the partial path without a trailing slash (except when pointing at a root)\n var base;\n // the partial path scanned in the previous round, with slash\n var previous;\n\n start();\n\n function start() {\n // Skip over roots\n var m = splitRootRe.exec(p);\n pos = m[0].length;\n current = m[0];\n base = m[0];\n previous = '';\n\n // On windows, check that the root exists. On unix there is no need.\n if (isWindows && !knownHard[base]) {\n fs.lstat(base, function(err) {\n if (err) return cb(err);\n knownHard[base] = true;\n LOOP();\n });\n } else {\n process.nextTick(LOOP);\n }\n }\n\n // walk down the path, swapping out linked pathparts for their real\n // values\n function LOOP() {\n // stop if scanned past end of path\n if (pos >= p.length) {\n if (cache) cache[original] = p;\n return cb(null, p);\n }\n\n // find the next part\n nextPartRe.lastIndex = pos;\n var result = nextPartRe.exec(p);\n previous = current;\n current += result[0];\n base = previous + result[1];\n pos = nextPartRe.lastIndex;\n\n // continue if not a symlink\n if (knownHard[base] || (cache && cache[base] === base)) {\n return process.nextTick(LOOP);\n }\n\n if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {\n // known symbolic link. 
no need to stat again.\n return gotResolvedLink(cache[base]);\n }\n\n return fs.lstat(base, gotStat);\n }\n\n function gotStat(err, stat) {\n if (err) return cb(err);\n\n // if not a symlink, skip to the next path part\n if (!stat.isSymbolicLink()) {\n knownHard[base] = true;\n if (cache) cache[base] = base;\n return process.nextTick(LOOP);\n }\n\n // stat & read the link if not read before\n // call gotTarget as soon as the link target is known\n // dev/ino always return 0 on windows, so skip the check.\n if (!isWindows) {\n var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);\n if (seenLinks.hasOwnProperty(id)) {\n return gotTarget(null, seenLinks[id], base);\n }\n }\n fs.stat(base, function(err) {\n if (err) return cb(err);\n\n fs.readlink(base, function(err, target) {\n if (!isWindows) seenLinks[id] = target;\n gotTarget(err, target);\n });\n });\n }\n\n function gotTarget(err, target, base) {\n if (err) return cb(err);\n\n var resolvedLink = pathModule.resolve(previous, target);\n if (cache) cache[base] = resolvedLink;\n gotResolvedLink(resolvedLink);\n }\n\n function gotResolvedLink(resolvedLink) {\n // resolve the link, then start over\n p = pathModule.resolve(resolvedLink, p.slice(pos));\n start();\n }\n};\n","exports.setopts = setopts\nexports.ownProp = ownProp\nexports.makeAbs = makeAbs\nexports.finish = finish\nexports.mark = mark\nexports.isIgnored = isIgnored\nexports.childrenIgnored = childrenIgnored\n\nfunction ownProp (obj, field) {\n return Object.prototype.hasOwnProperty.call(obj, field)\n}\n\nvar fs = require(\"fs\")\nvar path = require(\"path\")\nvar minimatch = require(\"minimatch\")\nvar isAbsolute = require(\"path-is-absolute\")\nvar Minimatch = minimatch.Minimatch\n\nfunction alphasort (a, b) {\n return a.localeCompare(b, 'en')\n}\n\nfunction setupIgnores (self, options) {\n self.ignore = options.ignore || []\n\n if (!Array.isArray(self.ignore))\n self.ignore = [self.ignore]\n\n if (self.ignore.length) {\n self.ignore = self.ignore.map(ignoreMap)\n }\n}\n\n// ignore patterns are always in dot:true mode.\nfunction ignoreMap (pattern) {\n var gmatcher = null\n if (pattern.slice(-3) === '/**') {\n var gpattern = pattern.replace(/(\\/\\*\\*)+$/, '')\n gmatcher = new Minimatch(gpattern, { dot: true })\n }\n\n return {\n matcher: new Minimatch(pattern, { dot: true }),\n gmatcher: gmatcher\n }\n}\n\nfunction setopts (self, pattern, options) {\n if (!options)\n options = {}\n\n // base-matching: just use globstar for that.\n if (options.matchBase && -1 === pattern.indexOf(\"/\")) {\n if (options.noglobstar) {\n throw new Error(\"base matching requires globstar\")\n }\n pattern = \"**/\" + pattern\n }\n\n self.silent = !!options.silent\n self.pattern = pattern\n self.strict = options.strict !== false\n self.realpath = !!options.realpath\n self.realpathCache = options.realpathCache || Object.create(null)\n self.follow = !!options.follow\n self.dot = !!options.dot\n self.mark = !!options.mark\n self.nodir = !!options.nodir\n if (self.nodir)\n self.mark = true\n self.sync = !!options.sync\n self.nounique = !!options.nounique\n self.nonull = !!options.nonull\n self.nosort = !!options.nosort\n self.nocase = !!options.nocase\n self.stat = !!options.stat\n self.noprocess = !!options.noprocess\n self.absolute = !!options.absolute\n self.fs = options.fs || fs\n\n self.maxLength = options.maxLength || Infinity\n self.cache = options.cache || Object.create(null)\n self.statCache = options.statCache || Object.create(null)\n self.symlinks = options.symlinks || 
Object.create(null)\n\n setupIgnores(self, options)\n\n self.changedCwd = false\n var cwd = process.cwd()\n if (!ownProp(options, \"cwd\"))\n self.cwd = cwd\n else {\n self.cwd = path.resolve(options.cwd)\n self.changedCwd = self.cwd !== cwd\n }\n\n self.root = options.root || path.resolve(self.cwd, \"/\")\n self.root = path.resolve(self.root)\n if (process.platform === \"win32\")\n self.root = self.root.replace(/\\\\/g, \"/\")\n\n // TODO: is an absolute `cwd` supposed to be resolved against `root`?\n // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')\n self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)\n if (process.platform === \"win32\")\n self.cwdAbs = self.cwdAbs.replace(/\\\\/g, \"/\")\n self.nomount = !!options.nomount\n\n // disable comments and negation in Minimatch.\n // Note that they are not supported in Glob itself anyway.\n options.nonegate = true\n options.nocomment = true\n\n self.minimatch = new Minimatch(pattern, options)\n self.options = self.minimatch.options\n}\n\nfunction finish (self) {\n var nou = self.nounique\n var all = nou ? [] : Object.create(null)\n\n for (var i = 0, l = self.matches.length; i < l; i ++) {\n var matches = self.matches[i]\n if (!matches || Object.keys(matches).length === 0) {\n if (self.nonull) {\n // do like the shell, and spit out the literal glob\n var literal = self.minimatch.globSet[i]\n if (nou)\n all.push(literal)\n else\n all[literal] = true\n }\n } else {\n // had matches\n var m = Object.keys(matches)\n if (nou)\n all.push.apply(all, m)\n else\n m.forEach(function (m) {\n all[m] = true\n })\n }\n }\n\n if (!nou)\n all = Object.keys(all)\n\n if (!self.nosort)\n all = all.sort(alphasort)\n\n // at *some* point we statted all of these\n if (self.mark) {\n for (var i = 0; i < all.length; i++) {\n all[i] = self._mark(all[i])\n }\n if (self.nodir) {\n all = all.filter(function (e) {\n var notDir = !(/\\/$/.test(e))\n var c = self.cache[e] || self.cache[makeAbs(self, e)]\n if (notDir && c)\n notDir = c !== 'DIR' && !Array.isArray(c)\n return notDir\n })\n }\n }\n\n if (self.ignore.length)\n all = all.filter(function(m) {\n return !isIgnored(self, m)\n })\n\n self.found = all\n}\n\nfunction mark (self, p) {\n var abs = makeAbs(self, p)\n var c = self.cache[abs]\n var m = p\n if (c) {\n var isDir = c === 'DIR' || Array.isArray(c)\n var slash = p.slice(-1) === '/'\n\n if (isDir && !slash)\n m += '/'\n else if (!isDir && slash)\n m = m.slice(0, -1)\n\n if (m !== p) {\n var mabs = makeAbs(self, m)\n self.statCache[mabs] = self.statCache[abs]\n self.cache[mabs] = self.cache[abs]\n }\n }\n\n return m\n}\n\n// lotta situps...\nfunction makeAbs (self, f) {\n var abs = f\n if (f.charAt(0) === '/') {\n abs = path.join(self.root, f)\n } else if (isAbsolute(f) || f === '') {\n abs = f\n } else if (self.changedCwd) {\n abs = path.resolve(self.cwd, f)\n } else {\n abs = path.resolve(f)\n }\n\n if (process.platform === 'win32')\n abs = abs.replace(/\\\\/g, '/')\n\n return abs\n}\n\n\n// Return true, if pattern ends with globstar '**', for the accompanying parent directory.\n// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents\nfunction isIgnored (self, path) {\n if (!self.ignore.length)\n return false\n\n return self.ignore.some(function(item) {\n return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))\n })\n}\n\nfunction childrenIgnored (self, path) {\n if (!self.ignore.length)\n return false\n\n return self.ignore.some(function(item) 
{\n return !!(item.gmatcher && item.gmatcher.match(path))\n })\n}\n","// Approach:\n//\n// 1. Get the minimatch set\n// 2. For each pattern in the set, PROCESS(pattern, false)\n// 3. Store matches per-set, then uniq them\n//\n// PROCESS(pattern, inGlobStar)\n// Get the first [n] items from pattern that are all strings\n// Join these together. This is PREFIX.\n// If there is no more remaining, then stat(PREFIX) and\n// add to matches if it succeeds. END.\n//\n// If inGlobStar and PREFIX is symlink and points to dir\n// set ENTRIES = []\n// else readdir(PREFIX) as ENTRIES\n// If fail, END\n//\n// with ENTRIES\n// If pattern[n] is GLOBSTAR\n// // handle the case where the globstar match is empty\n// // by pruning it out, and testing the resulting pattern\n// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)\n// // handle other cases.\n// for ENTRY in ENTRIES (not dotfiles)\n// // attach globstar + tail onto the entry\n// // Mark that this entry is a globstar match\n// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)\n//\n// else // not globstar\n// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)\n// Test ENTRY against pattern[n]\n// If fails, continue\n// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])\n//\n// Caveat:\n// Cache all stats and readdirs results to minimize syscall. Since all\n// we ever care about is existence and directory-ness, we can just keep\n// `true` for files, and [children,...] for directories, or `false` for\n// things that don't exist.\n\nmodule.exports = glob\n\nvar rp = require('fs.realpath')\nvar minimatch = require('minimatch')\nvar Minimatch = minimatch.Minimatch\nvar inherits = require('inherits')\nvar EE = require('events').EventEmitter\nvar path = require('path')\nvar assert = require('assert')\nvar isAbsolute = require('path-is-absolute')\nvar globSync = require('./sync.js')\nvar common = require('./common.js')\nvar setopts = common.setopts\nvar ownProp = common.ownProp\nvar inflight = require('inflight')\nvar util = require('util')\nvar childrenIgnored = common.childrenIgnored\nvar isIgnored = common.isIgnored\n\nvar once = require('once')\n\nfunction glob (pattern, options, cb) {\n if (typeof options === 'function') cb = options, options = {}\n if (!options) options = {}\n\n if (options.sync) {\n if (cb)\n throw new TypeError('callback provided to sync glob')\n return globSync(pattern, options)\n }\n\n return new Glob(pattern, options, cb)\n}\n\nglob.sync = globSync\nvar GlobSync = glob.GlobSync = globSync.GlobSync\n\n// old api surface\nglob.glob = glob\n\nfunction extend (origin, add) {\n if (add === null || typeof add !== 'object') {\n return origin\n }\n\n var keys = Object.keys(add)\n var i = keys.length\n while (i--) {\n origin[keys[i]] = add[keys[i]]\n }\n return origin\n}\n\nglob.hasMagic = function (pattern, options_) {\n var options = extend({}, options_)\n options.noprocess = true\n\n var g = new Glob(pattern, options)\n var set = g.minimatch.set\n\n if (!pattern)\n return false\n\n if (set.length > 1)\n return true\n\n for (var j = 0; j < set[0].length; j++) {\n if (typeof set[0][j] !== 'string')\n return true\n }\n\n return false\n}\n\nglob.Glob = Glob\ninherits(Glob, EE)\nfunction Glob (pattern, options, cb) {\n if (typeof options === 'function') {\n cb = options\n options = null\n }\n\n if (options && options.sync) {\n if (cb)\n throw new TypeError('callback provided to sync glob')\n return new GlobSync(pattern, options)\n }\n\n if (!(this instanceof Glob))\n return new Glob(pattern, options, cb)\n\n 
setopts(this, pattern, options)\n this._didRealPath = false\n\n // process each pattern in the minimatch set\n var n = this.minimatch.set.length\n\n // The matches are stored as {: true,...} so that\n // duplicates are automagically pruned.\n // Later, we do an Object.keys() on these.\n // Keep them as a list so we can fill in when nonull is set.\n this.matches = new Array(n)\n\n if (typeof cb === 'function') {\n cb = once(cb)\n this.on('error', cb)\n this.on('end', function (matches) {\n cb(null, matches)\n })\n }\n\n var self = this\n this._processing = 0\n\n this._emitQueue = []\n this._processQueue = []\n this.paused = false\n\n if (this.noprocess)\n return this\n\n if (n === 0)\n return done()\n\n var sync = true\n for (var i = 0; i < n; i ++) {\n this._process(this.minimatch.set[i], i, false, done)\n }\n sync = false\n\n function done () {\n --self._processing\n if (self._processing <= 0) {\n if (sync) {\n process.nextTick(function () {\n self._finish()\n })\n } else {\n self._finish()\n }\n }\n }\n}\n\nGlob.prototype._finish = function () {\n assert(this instanceof Glob)\n if (this.aborted)\n return\n\n if (this.realpath && !this._didRealpath)\n return this._realpath()\n\n common.finish(this)\n this.emit('end', this.found)\n}\n\nGlob.prototype._realpath = function () {\n if (this._didRealpath)\n return\n\n this._didRealpath = true\n\n var n = this.matches.length\n if (n === 0)\n return this._finish()\n\n var self = this\n for (var i = 0; i < this.matches.length; i++)\n this._realpathSet(i, next)\n\n function next () {\n if (--n === 0)\n self._finish()\n }\n}\n\nGlob.prototype._realpathSet = function (index, cb) {\n var matchset = this.matches[index]\n if (!matchset)\n return cb()\n\n var found = Object.keys(matchset)\n var self = this\n var n = found.length\n\n if (n === 0)\n return cb()\n\n var set = this.matches[index] = Object.create(null)\n found.forEach(function (p, i) {\n // If there's a problem with the stat, then it means that\n // one or more of the links in the realpath couldn't be\n // resolved. 
just return the abs value in that case.\n p = self._makeAbs(p)\n rp.realpath(p, self.realpathCache, function (er, real) {\n if (!er)\n set[real] = true\n else if (er.syscall === 'stat')\n set[p] = true\n else\n self.emit('error', er) // srsly wtf right here\n\n if (--n === 0) {\n self.matches[index] = set\n cb()\n }\n })\n })\n}\n\nGlob.prototype._mark = function (p) {\n return common.mark(this, p)\n}\n\nGlob.prototype._makeAbs = function (f) {\n return common.makeAbs(this, f)\n}\n\nGlob.prototype.abort = function () {\n this.aborted = true\n this.emit('abort')\n}\n\nGlob.prototype.pause = function () {\n if (!this.paused) {\n this.paused = true\n this.emit('pause')\n }\n}\n\nGlob.prototype.resume = function () {\n if (this.paused) {\n this.emit('resume')\n this.paused = false\n if (this._emitQueue.length) {\n var eq = this._emitQueue.slice(0)\n this._emitQueue.length = 0\n for (var i = 0; i < eq.length; i ++) {\n var e = eq[i]\n this._emitMatch(e[0], e[1])\n }\n }\n if (this._processQueue.length) {\n var pq = this._processQueue.slice(0)\n this._processQueue.length = 0\n for (var i = 0; i < pq.length; i ++) {\n var p = pq[i]\n this._processing--\n this._process(p[0], p[1], p[2], p[3])\n }\n }\n }\n}\n\nGlob.prototype._process = function (pattern, index, inGlobStar, cb) {\n assert(this instanceof Glob)\n assert(typeof cb === 'function')\n\n if (this.aborted)\n return\n\n this._processing++\n if (this.paused) {\n this._processQueue.push([pattern, index, inGlobStar, cb])\n return\n }\n\n //console.error('PROCESS %d', this._processing, pattern)\n\n // Get the first [n] parts of pattern that are all strings.\n var n = 0\n while (typeof pattern[n] === 'string') {\n n ++\n }\n // now n is the index of the first one that is *not* a string.\n\n // see if there's anything else\n var prefix\n switch (n) {\n // if not, then this is rather simple\n case pattern.length:\n this._processSimple(pattern.join('/'), index, cb)\n return\n\n case 0:\n // pattern *starts* with some non-trivial item.\n // going to readdir(cwd), but not include the prefix in matches.\n prefix = null\n break\n\n default:\n // pattern has some string bits in the front.\n // whatever it starts with, whether that's 'absolute' like /foo/bar,\n // or 'relative' like '../baz'\n prefix = pattern.slice(0, n).join('/')\n break\n }\n\n var remain = pattern.slice(n)\n\n // get the list of entries.\n var read\n if (prefix === null)\n read = '.'\n else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {\n if (!prefix || !isAbsolute(prefix))\n prefix = '/' + prefix\n read = prefix\n } else\n read = prefix\n\n var abs = this._makeAbs(read)\n\n //if ignored, skip _processing\n if (childrenIgnored(this, read))\n return cb()\n\n var isGlobStar = remain[0] === minimatch.GLOBSTAR\n if (isGlobStar)\n this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)\n else\n this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)\n}\n\nGlob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {\n var self = this\n this._readdir(abs, inGlobStar, function (er, entries) {\n return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)\n })\n}\n\nGlob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {\n\n // if the abs isn't a dir, then nothing can match!\n if (!entries)\n return cb()\n\n // It will only match dot entries if it starts with a dot, or if\n // dot is set. 
Stuff like @(.foo|.bar) isn't allowed.\n var pn = remain[0]\n var negate = !!this.minimatch.negate\n var rawGlob = pn._glob\n var dotOk = this.dot || rawGlob.charAt(0) === '.'\n\n var matchedEntries = []\n for (var i = 0; i < entries.length; i++) {\n var e = entries[i]\n if (e.charAt(0) !== '.' || dotOk) {\n var m\n if (negate && !prefix) {\n m = !e.match(pn)\n } else {\n m = e.match(pn)\n }\n if (m)\n matchedEntries.push(e)\n }\n }\n\n //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)\n\n var len = matchedEntries.length\n // If there are no matched entries, then nothing matches.\n if (len === 0)\n return cb()\n\n // if this is the last remaining pattern bit, then no need for\n // an additional stat *unless* the user has specified mark or\n // stat explicitly. We know they exist, since readdir returned\n // them.\n\n if (remain.length === 1 && !this.mark && !this.stat) {\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n if (prefix) {\n if (prefix !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n\n if (e.charAt(0) === '/' && !this.nomount) {\n e = path.join(this.root, e)\n }\n this._emitMatch(index, e)\n }\n // This was the last one, and no stats were needed\n return cb()\n }\n\n // now test all matched entries as stand-ins for that part\n // of the pattern.\n remain.shift()\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n var newPattern\n if (prefix) {\n if (prefix !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n this._process([e].concat(remain), index, inGlobStar, cb)\n }\n cb()\n}\n\nGlob.prototype._emitMatch = function (index, e) {\n if (this.aborted)\n return\n\n if (isIgnored(this, e))\n return\n\n if (this.paused) {\n this._emitQueue.push([index, e])\n return\n }\n\n var abs = isAbsolute(e) ? 
e : this._makeAbs(e)\n\n if (this.mark)\n e = this._mark(e)\n\n if (this.absolute)\n e = abs\n\n if (this.matches[index][e])\n return\n\n if (this.nodir) {\n var c = this.cache[abs]\n if (c === 'DIR' || Array.isArray(c))\n return\n }\n\n this.matches[index][e] = true\n\n var st = this.statCache[abs]\n if (st)\n this.emit('stat', e, st)\n\n this.emit('match', e)\n}\n\nGlob.prototype._readdirInGlobStar = function (abs, cb) {\n if (this.aborted)\n return\n\n // follow all symlinked directories forever\n // just proceed as if this is a non-globstar situation\n if (this.follow)\n return this._readdir(abs, false, cb)\n\n var lstatkey = 'lstat\\0' + abs\n var self = this\n var lstatcb = inflight(lstatkey, lstatcb_)\n\n if (lstatcb)\n self.fs.lstat(abs, lstatcb)\n\n function lstatcb_ (er, lstat) {\n if (er && er.code === 'ENOENT')\n return cb()\n\n var isSym = lstat && lstat.isSymbolicLink()\n self.symlinks[abs] = isSym\n\n // If it's not a symlink or a dir, then it's definitely a regular file.\n // don't bother doing a readdir in that case.\n if (!isSym && lstat && !lstat.isDirectory()) {\n self.cache[abs] = 'FILE'\n cb()\n } else\n self._readdir(abs, false, cb)\n }\n}\n\nGlob.prototype._readdir = function (abs, inGlobStar, cb) {\n if (this.aborted)\n return\n\n cb = inflight('readdir\\0'+abs+'\\0'+inGlobStar, cb)\n if (!cb)\n return\n\n //console.error('RD %j %j', +inGlobStar, abs)\n if (inGlobStar && !ownProp(this.symlinks, abs))\n return this._readdirInGlobStar(abs, cb)\n\n if (ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n if (!c || c === 'FILE')\n return cb()\n\n if (Array.isArray(c))\n return cb(null, c)\n }\n\n var self = this\n self.fs.readdir(abs, readdirCb(this, abs, cb))\n}\n\nfunction readdirCb (self, abs, cb) {\n return function (er, entries) {\n if (er)\n self._readdirError(abs, er, cb)\n else\n self._readdirEntries(abs, entries, cb)\n }\n}\n\nGlob.prototype._readdirEntries = function (abs, entries, cb) {\n if (this.aborted)\n return\n\n // if we haven't asked to stat everything, then just\n // assume that everything in there exists, so we can avoid\n // having to stat it a second time.\n if (!this.mark && !this.stat) {\n for (var i = 0; i < entries.length; i ++) {\n var e = entries[i]\n if (abs === '/')\n e = abs + e\n else\n e = abs + '/' + e\n this.cache[e] = true\n }\n }\n\n this.cache[abs] = entries\n return cb(null, entries)\n}\n\nGlob.prototype._readdirError = function (f, er, cb) {\n if (this.aborted)\n return\n\n // handle errors, and cache the information\n switch (er.code) {\n case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205\n case 'ENOTDIR': // totally normal. means it *does* exist.\n var abs = this._makeAbs(f)\n this.cache[abs] = 'FILE'\n if (abs === this.cwdAbs) {\n var error = new Error(er.code + ' invalid cwd ' + this.cwd)\n error.path = this.cwd\n error.code = er.code\n this.emit('error', error)\n this.abort()\n }\n break\n\n case 'ENOENT': // not terribly unusual\n case 'ELOOP':\n case 'ENAMETOOLONG':\n case 'UNKNOWN':\n this.cache[this._makeAbs(f)] = false\n break\n\n default: // some unusual error. 
Treat as failure.\n this.cache[this._makeAbs(f)] = false\n if (this.strict) {\n this.emit('error', er)\n // If the error is handled, then we abort\n // if not, we threw out of here\n this.abort()\n }\n if (!this.silent)\n console.error('glob error', er)\n break\n }\n\n return cb()\n}\n\nGlob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {\n var self = this\n this._readdir(abs, inGlobStar, function (er, entries) {\n self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)\n })\n}\n\n\nGlob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {\n //console.error('pgs2', prefix, remain[0], entries)\n\n // no entries means not a dir, so it can never have matches\n // foo.txt/** doesn't match foo.txt\n if (!entries)\n return cb()\n\n // test without the globstar, and with every child both below\n // and replacing the globstar.\n var remainWithoutGlobStar = remain.slice(1)\n var gspref = prefix ? [ prefix ] : []\n var noGlobStar = gspref.concat(remainWithoutGlobStar)\n\n // the noGlobStar pattern exits the inGlobStar state\n this._process(noGlobStar, index, false, cb)\n\n var isSym = this.symlinks[abs]\n var len = entries.length\n\n // If it's a symlink, and we're in a globstar, then stop\n if (isSym && inGlobStar)\n return cb()\n\n for (var i = 0; i < len; i++) {\n var e = entries[i]\n if (e.charAt(0) === '.' && !this.dot)\n continue\n\n // these two cases enter the inGlobStar state\n var instead = gspref.concat(entries[i], remainWithoutGlobStar)\n this._process(instead, index, true, cb)\n\n var below = gspref.concat(entries[i], remain)\n this._process(below, index, true, cb)\n }\n\n cb()\n}\n\nGlob.prototype._processSimple = function (prefix, index, cb) {\n // XXX review this. Shouldn't it be doing the mounting etc\n // before doing stat? kinda weird?\n var self = this\n this._stat(prefix, function (er, exists) {\n self._processSimple2(prefix, index, er, exists, cb)\n })\n}\nGlob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {\n\n //console.error('ps2', prefix, exists)\n\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n // If it doesn't exist, then just mark the lack of results\n if (!exists)\n return cb()\n\n if (prefix && isAbsolute(prefix) && !this.nomount) {\n var trail = /[\\/\\\\]$/.test(prefix)\n if (prefix.charAt(0) === '/') {\n prefix = path.join(this.root, prefix)\n } else {\n prefix = path.resolve(this.root, prefix)\n if (trail)\n prefix += '/'\n }\n }\n\n if (process.platform === 'win32')\n prefix = prefix.replace(/\\\\/g, '/')\n\n // Mark this as a match\n this._emitMatch(index, prefix)\n cb()\n}\n\n// Returns either 'DIR', 'FILE', or false\nGlob.prototype._stat = function (f, cb) {\n var abs = this._makeAbs(f)\n var needDir = f.slice(-1) === '/'\n\n if (f.length > this.maxLength)\n return cb()\n\n if (!this.stat && ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n\n if (Array.isArray(c))\n c = 'DIR'\n\n // It exists, but maybe not how we need it\n if (!needDir || c === 'DIR')\n return cb(null, c)\n\n if (needDir && c === 'FILE')\n return cb()\n\n // otherwise we have to stat, because maybe c=true\n // if we know it exists, but not what it is.\n }\n\n var exists\n var stat = this.statCache[abs]\n if (stat !== undefined) {\n if (stat === false)\n return cb(null, stat)\n else {\n var type = stat.isDirectory() ? 
'DIR' : 'FILE'\n if (needDir && type === 'FILE')\n return cb()\n else\n return cb(null, type, stat)\n }\n }\n\n var self = this\n var statcb = inflight('stat\\0' + abs, lstatcb_)\n if (statcb)\n self.fs.lstat(abs, statcb)\n\n function lstatcb_ (er, lstat) {\n if (lstat && lstat.isSymbolicLink()) {\n // If it's a symlink, then treat it as the target, unless\n // the target does not exist, then treat it as a file.\n return self.fs.stat(abs, function (er, stat) {\n if (er)\n self._stat2(f, abs, null, lstat, cb)\n else\n self._stat2(f, abs, er, stat, cb)\n })\n } else {\n self._stat2(f, abs, er, lstat, cb)\n }\n }\n}\n\nGlob.prototype._stat2 = function (f, abs, er, stat, cb) {\n if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {\n this.statCache[abs] = false\n return cb()\n }\n\n var needDir = f.slice(-1) === '/'\n this.statCache[abs] = stat\n\n if (abs.slice(-1) === '/' && stat && !stat.isDirectory())\n return cb(null, false, stat)\n\n var c = true\n if (stat)\n c = stat.isDirectory() ? 'DIR' : 'FILE'\n this.cache[abs] = this.cache[abs] || c\n\n if (needDir && c === 'FILE')\n return cb()\n\n return cb(null, c, stat)\n}\n","module.exports = globSync\nglobSync.GlobSync = GlobSync\n\nvar rp = require('fs.realpath')\nvar minimatch = require('minimatch')\nvar Minimatch = minimatch.Minimatch\nvar Glob = require('./glob.js').Glob\nvar util = require('util')\nvar path = require('path')\nvar assert = require('assert')\nvar isAbsolute = require('path-is-absolute')\nvar common = require('./common.js')\nvar setopts = common.setopts\nvar ownProp = common.ownProp\nvar childrenIgnored = common.childrenIgnored\nvar isIgnored = common.isIgnored\n\nfunction globSync (pattern, options) {\n if (typeof options === 'function' || arguments.length === 3)\n throw new TypeError('callback provided to sync glob\\n'+\n 'See: https://github.com/isaacs/node-glob/issues/167')\n\n return new GlobSync(pattern, options).found\n}\n\nfunction GlobSync (pattern, options) {\n if (!pattern)\n throw new Error('must provide pattern')\n\n if (typeof options === 'function' || arguments.length === 3)\n throw new TypeError('callback provided to sync glob\\n'+\n 'See: https://github.com/isaacs/node-glob/issues/167')\n\n if (!(this instanceof GlobSync))\n return new GlobSync(pattern, options)\n\n setopts(this, pattern, options)\n\n if (this.noprocess)\n return this\n\n var n = this.minimatch.set.length\n this.matches = new Array(n)\n for (var i = 0; i < n; i ++) {\n this._process(this.minimatch.set[i], i, false)\n }\n this._finish()\n}\n\nGlobSync.prototype._finish = function () {\n assert(this instanceof GlobSync)\n if (this.realpath) {\n var self = this\n this.matches.forEach(function (matchset, index) {\n var set = self.matches[index] = Object.create(null)\n for (var p in matchset) {\n try {\n p = self._makeAbs(p)\n var real = rp.realpathSync(p, self.realpathCache)\n set[real] = true\n } catch (er) {\n if (er.syscall === 'stat')\n set[self._makeAbs(p)] = true\n else\n throw er\n }\n }\n })\n }\n common.finish(this)\n}\n\n\nGlobSync.prototype._process = function (pattern, index, inGlobStar) {\n assert(this instanceof GlobSync)\n\n // Get the first [n] parts of pattern that are all strings.\n var n = 0\n while (typeof pattern[n] === 'string') {\n n ++\n }\n // now n is the index of the first one that is *not* a string.\n\n // See if there's anything else\n var prefix\n switch (n) {\n // if not, then this is rather simple\n case pattern.length:\n this._processSimple(pattern.join('/'), index)\n return\n\n case 0:\n // pattern 
*starts* with some non-trivial item.\n // going to readdir(cwd), but not include the prefix in matches.\n prefix = null\n break\n\n default:\n // pattern has some string bits in the front.\n // whatever it starts with, whether that's 'absolute' like /foo/bar,\n // or 'relative' like '../baz'\n prefix = pattern.slice(0, n).join('/')\n break\n }\n\n var remain = pattern.slice(n)\n\n // get the list of entries.\n var read\n if (prefix === null)\n read = '.'\n else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {\n if (!prefix || !isAbsolute(prefix))\n prefix = '/' + prefix\n read = prefix\n } else\n read = prefix\n\n var abs = this._makeAbs(read)\n\n //if ignored, skip processing\n if (childrenIgnored(this, read))\n return\n\n var isGlobStar = remain[0] === minimatch.GLOBSTAR\n if (isGlobStar)\n this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)\n else\n this._processReaddir(prefix, read, abs, remain, index, inGlobStar)\n}\n\n\nGlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {\n var entries = this._readdir(abs, inGlobStar)\n\n // if the abs isn't a dir, then nothing can match!\n if (!entries)\n return\n\n // It will only match dot entries if it starts with a dot, or if\n // dot is set. Stuff like @(.foo|.bar) isn't allowed.\n var pn = remain[0]\n var negate = !!this.minimatch.negate\n var rawGlob = pn._glob\n var dotOk = this.dot || rawGlob.charAt(0) === '.'\n\n var matchedEntries = []\n for (var i = 0; i < entries.length; i++) {\n var e = entries[i]\n if (e.charAt(0) !== '.' || dotOk) {\n var m\n if (negate && !prefix) {\n m = !e.match(pn)\n } else {\n m = e.match(pn)\n }\n if (m)\n matchedEntries.push(e)\n }\n }\n\n var len = matchedEntries.length\n // If there are no matched entries, then nothing matches.\n if (len === 0)\n return\n\n // if this is the last remaining pattern bit, then no need for\n // an additional stat *unless* the user has specified mark or\n // stat explicitly. 
We know they exist, since readdir returned\n // them.\n\n if (remain.length === 1 && !this.mark && !this.stat) {\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n if (prefix) {\n if (prefix.slice(-1) !== '/')\n e = prefix + '/' + e\n else\n e = prefix + e\n }\n\n if (e.charAt(0) === '/' && !this.nomount) {\n e = path.join(this.root, e)\n }\n this._emitMatch(index, e)\n }\n // This was the last one, and no stats were needed\n return\n }\n\n // now test all matched entries as stand-ins for that part\n // of the pattern.\n remain.shift()\n for (var i = 0; i < len; i ++) {\n var e = matchedEntries[i]\n var newPattern\n if (prefix)\n newPattern = [prefix, e]\n else\n newPattern = [e]\n this._process(newPattern.concat(remain), index, inGlobStar)\n }\n}\n\n\nGlobSync.prototype._emitMatch = function (index, e) {\n if (isIgnored(this, e))\n return\n\n var abs = this._makeAbs(e)\n\n if (this.mark)\n e = this._mark(e)\n\n if (this.absolute) {\n e = abs\n }\n\n if (this.matches[index][e])\n return\n\n if (this.nodir) {\n var c = this.cache[abs]\n if (c === 'DIR' || Array.isArray(c))\n return\n }\n\n this.matches[index][e] = true\n\n if (this.stat)\n this._stat(e)\n}\n\n\nGlobSync.prototype._readdirInGlobStar = function (abs) {\n // follow all symlinked directories forever\n // just proceed as if this is a non-globstar situation\n if (this.follow)\n return this._readdir(abs, false)\n\n var entries\n var lstat\n var stat\n try {\n lstat = this.fs.lstatSync(abs)\n } catch (er) {\n if (er.code === 'ENOENT') {\n // lstat failed, doesn't exist\n return null\n }\n }\n\n var isSym = lstat && lstat.isSymbolicLink()\n this.symlinks[abs] = isSym\n\n // If it's not a symlink or a dir, then it's definitely a regular file.\n // don't bother doing a readdir in that case.\n if (!isSym && lstat && !lstat.isDirectory())\n this.cache[abs] = 'FILE'\n else\n entries = this._readdir(abs, false)\n\n return entries\n}\n\nGlobSync.prototype._readdir = function (abs, inGlobStar) {\n var entries\n\n if (inGlobStar && !ownProp(this.symlinks, abs))\n return this._readdirInGlobStar(abs)\n\n if (ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n if (!c || c === 'FILE')\n return null\n\n if (Array.isArray(c))\n return c\n }\n\n try {\n return this._readdirEntries(abs, this.fs.readdirSync(abs))\n } catch (er) {\n this._readdirError(abs, er)\n return null\n }\n}\n\nGlobSync.prototype._readdirEntries = function (abs, entries) {\n // if we haven't asked to stat everything, then just\n // assume that everything in there exists, so we can avoid\n // having to stat it a second time.\n if (!this.mark && !this.stat) {\n for (var i = 0; i < entries.length; i ++) {\n var e = entries[i]\n if (abs === '/')\n e = abs + e\n else\n e = abs + '/' + e\n this.cache[e] = true\n }\n }\n\n this.cache[abs] = entries\n\n // mark and cache dir-ness\n return entries\n}\n\nGlobSync.prototype._readdirError = function (f, er) {\n // handle errors, and cache the information\n switch (er.code) {\n case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205\n case 'ENOTDIR': // totally normal. 
means it *does* exist.\n var abs = this._makeAbs(f)\n this.cache[abs] = 'FILE'\n if (abs === this.cwdAbs) {\n var error = new Error(er.code + ' invalid cwd ' + this.cwd)\n error.path = this.cwd\n error.code = er.code\n throw error\n }\n break\n\n case 'ENOENT': // not terribly unusual\n case 'ELOOP':\n case 'ENAMETOOLONG':\n case 'UNKNOWN':\n this.cache[this._makeAbs(f)] = false\n break\n\n default: // some unusual error. Treat as failure.\n this.cache[this._makeAbs(f)] = false\n if (this.strict)\n throw er\n if (!this.silent)\n console.error('glob error', er)\n break\n }\n}\n\nGlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {\n\n var entries = this._readdir(abs, inGlobStar)\n\n // no entries means not a dir, so it can never have matches\n // foo.txt/** doesn't match foo.txt\n if (!entries)\n return\n\n // test without the globstar, and with every child both below\n // and replacing the globstar.\n var remainWithoutGlobStar = remain.slice(1)\n var gspref = prefix ? [ prefix ] : []\n var noGlobStar = gspref.concat(remainWithoutGlobStar)\n\n // the noGlobStar pattern exits the inGlobStar state\n this._process(noGlobStar, index, false)\n\n var len = entries.length\n var isSym = this.symlinks[abs]\n\n // If it's a symlink, and we're in a globstar, then stop\n if (isSym && inGlobStar)\n return\n\n for (var i = 0; i < len; i++) {\n var e = entries[i]\n if (e.charAt(0) === '.' && !this.dot)\n continue\n\n // these two cases enter the inGlobStar state\n var instead = gspref.concat(entries[i], remainWithoutGlobStar)\n this._process(instead, index, true)\n\n var below = gspref.concat(entries[i], remain)\n this._process(below, index, true)\n }\n}\n\nGlobSync.prototype._processSimple = function (prefix, index) {\n // XXX review this. Shouldn't it be doing the mounting etc\n // before doing stat? kinda weird?\n var exists = this._stat(prefix)\n\n if (!this.matches[index])\n this.matches[index] = Object.create(null)\n\n // If it doesn't exist, then just mark the lack of results\n if (!exists)\n return\n\n if (prefix && isAbsolute(prefix) && !this.nomount) {\n var trail = /[\\/\\\\]$/.test(prefix)\n if (prefix.charAt(0) === '/') {\n prefix = path.join(this.root, prefix)\n } else {\n prefix = path.resolve(this.root, prefix)\n if (trail)\n prefix += '/'\n }\n }\n\n if (process.platform === 'win32')\n prefix = prefix.replace(/\\\\/g, '/')\n\n // Mark this as a match\n this._emitMatch(index, prefix)\n}\n\n// Returns either 'DIR', 'FILE', or false\nGlobSync.prototype._stat = function (f) {\n var abs = this._makeAbs(f)\n var needDir = f.slice(-1) === '/'\n\n if (f.length > this.maxLength)\n return false\n\n if (!this.stat && ownProp(this.cache, abs)) {\n var c = this.cache[abs]\n\n if (Array.isArray(c))\n c = 'DIR'\n\n // It exists, but maybe not how we need it\n if (!needDir || c === 'DIR')\n return c\n\n if (needDir && c === 'FILE')\n return false\n\n // otherwise we have to stat, because maybe c=true\n // if we know it exists, but not what it is.\n }\n\n var exists\n var stat = this.statCache[abs]\n if (!stat) {\n var lstat\n try {\n lstat = this.fs.lstatSync(abs)\n } catch (er) {\n if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {\n this.statCache[abs] = false\n return false\n }\n }\n\n if (lstat && lstat.isSymbolicLink()) {\n try {\n stat = this.fs.statSync(abs)\n } catch (er) {\n stat = lstat\n }\n } else {\n stat = lstat\n }\n }\n\n this.statCache[abs] = stat\n\n var c = true\n if (stat)\n c = stat.isDirectory() ? 
'DIR' : 'FILE'\n\n this.cache[abs] = this.cache[abs] || c\n\n if (needDir && c === 'FILE')\n return false\n\n return c\n}\n\nGlobSync.prototype._mark = function (p) {\n return common.mark(this, p)\n}\n\nGlobSync.prototype._makeAbs = function (f) {\n return common.makeAbs(this, f)\n}\n","var wrappy = require('wrappy')\nvar reqs = Object.create(null)\nvar once = require('once')\n\nmodule.exports = wrappy(inflight)\n\nfunction inflight (key, cb) {\n if (reqs[key]) {\n reqs[key].push(cb)\n return null\n } else {\n reqs[key] = [cb]\n return makeres(key)\n }\n}\n\nfunction makeres (key) {\n return once(function RES () {\n var cbs = reqs[key]\n var len = cbs.length\n var args = slice(arguments)\n\n // XXX It's somewhat ambiguous whether a new callback added in this\n // pass should be queued for later execution if something in the\n // list of callbacks throws, or if it should just be discarded.\n // However, it's such an edge case that it hardly matters, and either\n // choice is likely as surprising as the other.\n // As it happens, we do go ahead and schedule it for later execution.\n try {\n for (var i = 0; i < len; i++) {\n cbs[i].apply(null, args)\n }\n } finally {\n if (cbs.length > len) {\n // added more in the interim.\n // de-zalgo, just in case, but don't call again.\n cbs.splice(0, len)\n process.nextTick(function () {\n RES.apply(null, args)\n })\n } else {\n delete reqs[key]\n }\n }\n })\n}\n\nfunction slice (args) {\n var length = args.length\n var array = []\n\n for (var i = 0; i < length; i++) array[i] = args[i]\n return array\n}\n","try {\n var util = require('util');\n /* istanbul ignore next */\n if (typeof util.inherits !== 'function') throw '';\n module.exports = util.inherits;\n} catch (e) {\n /* istanbul ignore next */\n module.exports = require('./inherits_browser.js');\n}\n","if (typeof Object.create === 'function') {\n // implementation from standard node.js 'util' module\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n ctor.prototype = Object.create(superCtor.prototype, {\n constructor: {\n value: ctor,\n enumerable: false,\n writable: true,\n configurable: true\n }\n })\n }\n };\n} else {\n // old school shim for old browsers\n module.exports = function inherits(ctor, superCtor) {\n if (superCtor) {\n ctor.super_ = superCtor\n var TempCtor = function () {}\n TempCtor.prototype = superCtor.prototype\n ctor.prototype = new TempCtor()\n ctor.prototype.constructor = ctor\n }\n }\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","module.exports = minimatch\nminimatch.Minimatch = Minimatch\n\nvar path = { sep: '/' }\ntry {\n path = require('path')\n} catch (er) {}\n\nvar GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}\nvar expand = 
require('brace-expansion')\n\nvar plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' }\n}\n\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nvar qmark = '[^/]'\n\n// * => any number of characters\nvar star = qmark + '*?'\n\n// ** when dots are allowed. Anything goes, except .. and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nvar twoStarDot = '(?:(?!(?:\\\\\\/|^)(?:\\\\.{1,2})($|\\\\\\/)).)*?'\n\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nvar twoStarNoDot = '(?:(?!(?:\\\\\\/|^)\\\\.).)*?'\n\n// characters that need to be escaped in RegExp.\nvar reSpecials = charSet('().*{}+?[]^$\\\\!')\n\n// \"abc\" -> { a:true, b:true, c:true }\nfunction charSet (s) {\n return s.split('').reduce(function (set, c) {\n set[c] = true\n return set\n }, {})\n}\n\n// normalizes slashes.\nvar slashSplit = /\\/+/\n\nminimatch.filter = filter\nfunction filter (pattern, options) {\n options = options || {}\n return function (p, i, list) {\n return minimatch(p, pattern, options)\n }\n}\n\nfunction ext (a, b) {\n a = a || {}\n b = b || {}\n var t = {}\n Object.keys(b).forEach(function (k) {\n t[k] = b[k]\n })\n Object.keys(a).forEach(function (k) {\n t[k] = a[k]\n })\n return t\n}\n\nminimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return minimatch\n\n var orig = minimatch\n\n var m = function minimatch (p, pattern, options) {\n return orig.minimatch(p, pattern, ext(def, options))\n }\n\n m.Minimatch = function Minimatch (pattern, options) {\n return new orig.Minimatch(pattern, ext(def, options))\n }\n\n return m\n}\n\nMinimatch.defaults = function (def) {\n if (!def || !Object.keys(def).length) return Minimatch\n return minimatch.defaults(def).Minimatch\n}\n\nfunction minimatch (p, pattern, options) {\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false\n }\n\n // \"\" only matches \"\"\n if (pattern.trim() === '') return p === ''\n\n return new Minimatch(pattern, options).match(p)\n}\n\nfunction Minimatch (pattern, options) {\n if (!(this instanceof Minimatch)) {\n return new Minimatch(pattern, options)\n }\n\n if (typeof pattern !== 'string') {\n throw new TypeError('glob pattern string required')\n }\n\n if (!options) options = {}\n pattern = pattern.trim()\n\n // windows support: need to use /, not \\\n if (path.sep !== '/') {\n pattern = pattern.split(path.sep).join('/')\n }\n\n this.options = options\n this.set = []\n this.pattern = pattern\n this.regexp = null\n this.negate = false\n this.comment = false\n this.empty = false\n\n // make the set of regexps etc.\n this.make()\n}\n\nMinimatch.prototype.debug = function () {}\n\nMinimatch.prototype.make = make\nfunction make () {\n // don't do it more than once.\n if (this._made) return\n\n var pattern = this.pattern\n var options = this.options\n\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true\n return\n }\n if (!pattern) {\n this.empty = true\n return\n }\n\n // step 1: figure out negation, etc.\n this.parseNegate()\n\n // step 2: expand braces\n var set = this.globSet = 
this.braceExpand()\n\n if (options.debug) this.debug = console.error\n\n this.debug(this.pattern, set)\n\n // step 3: now we have a set, so turn each one into a series of path-portion\n // matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n set = this.globParts = set.map(function (s) {\n return s.split(slashSplit)\n })\n\n this.debug(this.pattern, set)\n\n // glob --> regexps\n set = set.map(function (s, si, set) {\n return s.map(this.parse, this)\n }, this)\n\n this.debug(this.pattern, set)\n\n // filter out everything that didn't compile properly.\n set = set.filter(function (s) {\n return s.indexOf(false) === -1\n })\n\n this.debug(this.pattern, set)\n\n this.set = set\n}\n\nMinimatch.prototype.parseNegate = parseNegate\nfunction parseNegate () {\n var pattern = this.pattern\n var negate = false\n var options = this.options\n var negateOffset = 0\n\n if (options.nonegate) return\n\n for (var i = 0, l = pattern.length\n ; i < l && pattern.charAt(i) === '!'\n ; i++) {\n negate = !negate\n negateOffset++\n }\n\n if (negateOffset) this.pattern = pattern.substr(negateOffset)\n this.negate = negate\n}\n\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nminimatch.braceExpand = function (pattern, options) {\n return braceExpand(pattern, options)\n}\n\nMinimatch.prototype.braceExpand = braceExpand\n\nfunction braceExpand (pattern, options) {\n if (!options) {\n if (this instanceof Minimatch) {\n options = this.options\n } else {\n options = {}\n }\n }\n\n pattern = typeof pattern === 'undefined'\n ? this.pattern : pattern\n\n if (typeof pattern === 'undefined') {\n throw new TypeError('undefined pattern')\n }\n\n if (options.nobrace ||\n !pattern.match(/\\{.*\\}/)) {\n // shortcut. no need to expand.\n return [pattern]\n }\n\n return expand(pattern)\n}\n\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nMinimatch.prototype.parse = parse\nvar SUBPARSE = {}\nfunction parse (pattern, isSub) {\n if (pattern.length > 1024 * 64) {\n throw new TypeError('pattern is too long')\n }\n\n var options = this.options\n\n // shortcuts\n if (!options.noglobstar && pattern === '**') return GLOBSTAR\n if (pattern === '') return ''\n\n var re = ''\n var hasMagic = !!options.nocase\n var escaping = false\n // ? => one single character\n var patternListStack = []\n var negativeLists = []\n var stateChar\n var inClass = false\n var reClassStart = -1\n var classStart = -1\n // . and .. never match anything that doesn't start with .,\n // even when options.dot is set.\n var patternStart = pattern.charAt(0) === '.' ? '' // anything\n // not (start or / followed by . or .. 
followed by / or end)\n : options.dot ? '(?!(?:^|\\\\\\/)\\\\.{1,2}(?:$|\\\\\\/))'\n : '(?!\\\\.)'\n var self = this\n\n function clearStateChar () {\n if (stateChar) {\n // we had some state-tracking character\n // that wasn't consumed by this pass.\n switch (stateChar) {\n case '*':\n re += star\n hasMagic = true\n break\n case '?':\n re += qmark\n hasMagic = true\n break\n default:\n re += '\\\\' + stateChar\n break\n }\n self.debug('clearStateChar %j %j', stateChar, re)\n stateChar = false\n }\n }\n\n for (var i = 0, len = pattern.length, c\n ; (i < len) && (c = pattern.charAt(i))\n ; i++) {\n this.debug('%s\\t%s %s %j', pattern, i, re, c)\n\n // skip over any that are escaped.\n if (escaping && reSpecials[c]) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n switch (c) {\n case '/':\n // completely not allowed, even escaped.\n // Should already be path-split by now.\n return false\n\n case '\\\\':\n clearStateChar()\n escaping = true\n continue\n\n // the various stateChar values\n // for the \"extglob\" stuff.\n case '?':\n case '*':\n case '+':\n case '@':\n case '!':\n this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c)\n\n // all of those are literals inside a class, except that\n // the glob [!a] means [^a] in regexp\n if (inClass) {\n this.debug(' in class')\n if (c === '!' && i === classStart + 1) c = '^'\n re += c\n continue\n }\n\n // if we already have a stateChar, then it means\n // that there was something like ** or +? in there.\n // Handle the stateChar, then proceed with this one.\n self.debug('call clearStateChar %j', stateChar)\n clearStateChar()\n stateChar = c\n // if extglob is disabled, then +(asdf|foo) isn't a thing.\n // just clear the statechar *now*, rather than even diving into\n // the patternList stuff.\n if (options.noext) clearStateChar()\n continue\n\n case '(':\n if (inClass) {\n re += '('\n continue\n }\n\n if (!stateChar) {\n re += '\\\\('\n continue\n }\n\n patternListStack.push({\n type: stateChar,\n start: i - 1,\n reStart: re.length,\n open: plTypes[stateChar].open,\n close: plTypes[stateChar].close\n })\n // negation is (?:(?!js)[^/]*)\n re += stateChar === '!' ? '(?:(?!(?:' : '(?:'\n this.debug('plType %j %j', stateChar, re)\n stateChar = false\n continue\n\n case ')':\n if (inClass || !patternListStack.length) {\n re += '\\\\)'\n continue\n }\n\n clearStateChar()\n hasMagic = true\n var pl = patternListStack.pop()\n // negation is (?:(?!js)[^/]*)\n // The others are (?:)\n re += pl.close\n if (pl.type === '!') {\n negativeLists.push(pl)\n }\n pl.reEnd = re.length\n continue\n\n case '|':\n if (inClass || !patternListStack.length || escaping) {\n re += '\\\\|'\n escaping = false\n continue\n }\n\n clearStateChar()\n re += '|'\n continue\n\n // these are mostly the same in regexp and glob\n case '[':\n // swallow any state-tracking char before the [\n clearStateChar()\n\n if (inClass) {\n re += '\\\\' + c\n continue\n }\n\n inClass = true\n classStart = i\n reClassStart = re.length\n re += c\n continue\n\n case ']':\n // a right bracket shall lose its special\n // meaning and represent itself in\n // a bracket expression if it occurs\n // first in the list. -- POSIX.2 2.8.3.2\n if (i === classStart + 1 || !inClass) {\n re += '\\\\' + c\n escaping = false\n continue\n }\n\n // handle the case where we left a class open.\n // \"[z-a]\" is valid, equivalent to \"\\[z-a\\]\"\n if (inClass) {\n // split where the last [ was, make sure we don't have\n // an invalid re. 
if so, re-walk the contents of the\n // would-be class to re-translate any characters that\n // were passed through as-is\n // TODO: It would probably be faster to determine this\n // without a try/catch and a new RegExp, but it's tricky\n // to do safely. For now, this is safe and works.\n var cs = pattern.substring(classStart + 1, i)\n try {\n RegExp('[' + cs + ']')\n } catch (er) {\n // not a valid class!\n var sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0] + '\\\\]'\n hasMagic = hasMagic || sp[1]\n inClass = false\n continue\n }\n }\n\n // finish up the class.\n hasMagic = true\n inClass = false\n re += c\n continue\n\n default:\n // swallow any state char that wasn't consumed\n clearStateChar()\n\n if (escaping) {\n // no need\n escaping = false\n } else if (reSpecials[c]\n && !(c === '^' && inClass)) {\n re += '\\\\'\n }\n\n re += c\n\n } // switch\n } // for\n\n // handle the case where we left a class open.\n // \"[abc\" is valid, equivalent to \"\\[abc\"\n if (inClass) {\n // split where the last [ was, and escape it\n // this is a huge pita. We now have to re-walk\n // the contents of the would-be class to re-translate\n // any characters that were passed through as-is\n cs = pattern.substr(classStart + 1)\n sp = this.parse(cs, SUBPARSE)\n re = re.substr(0, reClassStart) + '\\\\[' + sp[0]\n hasMagic = hasMagic || sp[1]\n }\n\n // handle the case where we had a +( thing at the *end*\n // of the pattern.\n // each pattern list stack adds 3 chars, and we need to go through\n // and escape any | chars that were passed through as-is for the regexp.\n // Go through and escape them, taking care not to double-escape any\n // | chars that were already escaped.\n for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n var tail = re.slice(pl.reStart + pl.open.length)\n this.debug('setting tail', re, pl)\n // maybe some even number of \\, then maybe 1 \\, followed by a |\n tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, function (_, $1, $2) {\n if (!$2) {\n // the | isn't already escaped, so escape it.\n $2 = '\\\\'\n }\n\n // need to escape all those slashes *again*, without escaping the\n // one that we need for escaping the | character. As it works out,\n // escaping an even number of slashes can be done by simply repeating\n // it exactly after itself. That's why this trick works.\n //\n // I am sorry that you have to see this.\n return $1 + $1 + $2 + '|'\n })\n\n this.debug('tail=%j\\n %s', tail, tail, pl, re)\n var t = pl.type === '*' ? star\n : pl.type === '?' ? qmark\n : '\\\\' + pl.type\n\n hasMagic = true\n re = re.slice(0, pl.reStart) + t + '\\\\(' + tail\n }\n\n // handle trailing things that only matter at the very end.\n clearStateChar()\n if (escaping) {\n // trailing \\\\\n re += '\\\\\\\\'\n }\n\n // only need to apply the nodot start if the re starts with\n // something that could conceivably capture a dot\n var addPatternStart = false\n switch (re.charAt(0)) {\n case '.':\n case '[':\n case '(': addPatternStart = true\n }\n\n // Hack to work around lack of negative lookbehind in JS\n // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n // like 'a.xyz.yz' doesn't match. 
So, the first negative\n // lookahead, has to look ALL the way ahead, to the end of\n // the pattern.\n for (var n = negativeLists.length - 1; n > -1; n--) {\n var nl = negativeLists[n]\n\n var nlBefore = re.slice(0, nl.reStart)\n var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)\n var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)\n var nlAfter = re.slice(nl.reEnd)\n\n nlLast += nlAfter\n\n // Handle nested stuff like *(*.js|!(*.json)), where open parens\n // mean that we should *not* include the ) in the bit that is considered\n // \"after\" the negated section.\n var openParensBefore = nlBefore.split('(').length - 1\n var cleanAfter = nlAfter\n for (i = 0; i < openParensBefore; i++) {\n cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '')\n }\n nlAfter = cleanAfter\n\n var dollar = ''\n if (nlAfter === '' && isSub !== SUBPARSE) {\n dollar = '$'\n }\n var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast\n re = newRe\n }\n\n // if the re is not \"\" at this point, then we need to make sure\n // it doesn't match against an empty path part.\n // Otherwise a/* will match a/, which it should not.\n if (re !== '' && hasMagic) {\n re = '(?=.)' + re\n }\n\n if (addPatternStart) {\n re = patternStart + re\n }\n\n // parsing just a piece of a larger pattern.\n if (isSub === SUBPARSE) {\n return [re, hasMagic]\n }\n\n // skip the regexp for non-magical patterns\n // unescape anything in it, though, so that it'll be\n // an exact match against a file etc.\n if (!hasMagic) {\n return globUnescape(pattern)\n }\n\n var flags = options.nocase ? 'i' : ''\n try {\n var regExp = new RegExp('^' + re + '$', flags)\n } catch (er) {\n // If it was an invalid regular expression, then it can't match\n // anything. This trick looks for a character after the end of\n // the string, which is of course impossible, except in multi-line\n // mode, but it's not a /m regex.\n return new RegExp('$.')\n }\n\n regExp._glob = pattern\n regExp._src = re\n\n return regExp\n}\n\nminimatch.makeRe = function (pattern, options) {\n return new Minimatch(pattern, options || {}).makeRe()\n}\n\nMinimatch.prototype.makeRe = makeRe\nfunction makeRe () {\n if (this.regexp || this.regexp === false) return this.regexp\n\n // at this point, this.set is a 2d array of partial\n // pattern strings, or \"**\".\n //\n // It's better to use .match(). This function shouldn't\n // be used, really, but it's pretty convenient sometimes,\n // when you just want to work with a regex.\n var set = this.set\n\n if (!set.length) {\n this.regexp = false\n return this.regexp\n }\n var options = this.options\n\n var twoStar = options.noglobstar ? star\n : options.dot ? twoStarDot\n : twoStarNoDot\n var flags = options.nocase ? 'i' : ''\n\n var re = set.map(function (pattern) {\n return pattern.map(function (p) {\n return (p === GLOBSTAR) ? twoStar\n : (typeof p === 'string') ? regExpEscape(p)\n : p._src\n }).join('\\\\\\/')\n }).join('|')\n\n // must match entire pattern\n // ending in a * or ** will make it less strict.\n re = '^(?:' + re + ')$'\n\n // can match anything, as long as it's not this.\n if (this.negate) re = '^(?!' 
+ re + ').*$'\n\n try {\n this.regexp = new RegExp(re, flags)\n } catch (ex) {\n this.regexp = false\n }\n return this.regexp\n}\n\nminimatch.match = function (list, pattern, options) {\n options = options || {}\n var mm = new Minimatch(pattern, options)\n list = list.filter(function (f) {\n return mm.match(f)\n })\n if (mm.options.nonull && !list.length) {\n list.push(pattern)\n }\n return list\n}\n\nMinimatch.prototype.match = match\nfunction match (f, partial) {\n this.debug('match', f, this.pattern)\n // short-circuit in the case of busted things.\n // comments, etc.\n if (this.comment) return false\n if (this.empty) return f === ''\n\n if (f === '/' && partial) return true\n\n var options = this.options\n\n // windows: need to use /, not \\\n if (path.sep !== '/') {\n f = f.split(path.sep).join('/')\n }\n\n // treat the test path as a set of pathparts.\n f = f.split(slashSplit)\n this.debug(this.pattern, 'split', f)\n\n // just ONE of the pattern sets in this.set needs to match\n // in order for it to be valid. If negating, then just one\n // match means that we have failed.\n // Either way, return on the first hit.\n\n var set = this.set\n this.debug(this.pattern, 'set', set)\n\n // Find the basename of the path by looking for the last non-empty segment\n var filename\n var i\n for (i = f.length - 1; i >= 0; i--) {\n filename = f[i]\n if (filename) break\n }\n\n for (i = 0; i < set.length; i++) {\n var pattern = set[i]\n var file = f\n if (options.matchBase && pattern.length === 1) {\n file = [filename]\n }\n var hit = this.matchOne(file, pattern, partial)\n if (hit) {\n if (options.flipNegate) return true\n return !this.negate\n }\n }\n\n // didn't get any hits. this is success if it's a negative\n // pattern, failure otherwise.\n if (options.flipNegate) return false\n return this.negate\n}\n\n// set partial to true to test if, for example,\n// \"/a/b\" matches the start of \"/*/b/*/d\"\n// Partial means, if you run out of file before you run\n// out of pattern, then that's fine, as long as all\n// the parts match.\nMinimatch.prototype.matchOne = function (file, pattern, partial) {\n var options = this.options\n\n this.debug('matchOne',\n { 'this': this, file: file, pattern: pattern })\n\n this.debug('matchOne', file.length, pattern.length)\n\n for (var fi = 0,\n pi = 0,\n fl = file.length,\n pl = pattern.length\n ; (fi < fl) && (pi < pl)\n ; fi++, pi++) {\n this.debug('matchOne loop')\n var p = pattern[pi]\n var f = file[fi]\n\n this.debug(pattern, p, f)\n\n // should be impossible.\n // some invalid regexp stuff in the set.\n if (p === false) return false\n\n if (p === GLOBSTAR) {\n this.debug('GLOBSTAR', [pattern, p, f])\n\n // \"**\"\n // a/**/b/**/c would match the following:\n // a/b/x/y/z/c\n // a/x/y/z/b/c\n // a/b/x/b/x/c\n // a/b/c\n // To do this, take the rest of the pattern after\n // the **, and see if it would match the file remainder.\n // If so, return success.\n // If not, the ** \"swallows\" a segment, and try again.\n // This is recursively awful.\n //\n // a/**/b/**/c matching a/b/x/y/z/c\n // - a matches a\n // - doublestar\n // - matchOne(b/x/y/z/c, b/**/c)\n // - b matches b\n // - doublestar\n // - matchOne(x/y/z/c, c) -> no\n // - matchOne(y/z/c, c) -> no\n // - matchOne(z/c, c) -> no\n // - matchOne(c, c) yes, hit\n var fr = fi\n var pr = pi + 1\n if (pr === pl) {\n this.debug('** at the end')\n // a ** at the end will just swallow the rest.\n // We have found a match.\n // however, it will not swallow /.x, unless\n // options.dot is set.\n // . and .. 
are *never* matched by **, for explosively\n // exponential reasons.\n for (; fi < fl; fi++) {\n if (file[fi] === '.' || file[fi] === '..' ||\n (!options.dot && file[fi].charAt(0) === '.')) return false\n }\n return true\n }\n\n // ok, let's see if we can swallow whatever we can.\n while (fr < fl) {\n var swallowee = file[fr]\n\n this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee)\n\n // XXX remove this slice. Just pass the start index.\n if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n this.debug('globstar found match!', fr, fl, swallowee)\n // found a match.\n return true\n } else {\n // can't swallow \".\" or \"..\" ever.\n // can only swallow \".foo\" when explicitly asked.\n if (swallowee === '.' || swallowee === '..' ||\n (!options.dot && swallowee.charAt(0) === '.')) {\n this.debug('dot detected!', file, fr, pattern, pr)\n break\n }\n\n // ** swallows a segment, and continue.\n this.debug('globstar swallow a segment, and continue')\n fr++\n }\n }\n\n // no match was found.\n // However, in partial mode, we can't say this is necessarily over.\n // If there's more *pattern* left, then\n if (partial) {\n // ran out of file\n this.debug('\\n>>> no match, partial?', file, fr, pattern, pr)\n if (fr === fl) return true\n }\n return false\n }\n\n // something other than **\n // non-magic patterns just have to match exactly\n // patterns with magic have been turned into regexps.\n var hit\n if (typeof p === 'string') {\n if (options.nocase) {\n hit = f.toLowerCase() === p.toLowerCase()\n } else {\n hit = f === p\n }\n this.debug('string match', p, f, hit)\n } else {\n hit = f.match(p)\n this.debug('pattern match', p, f, hit)\n }\n\n if (!hit) return false\n }\n\n // Note: ending in / means that we'll get a final \"\"\n // at the end of the pattern. This can only match a\n // corresponding \"\" at the end of the file.\n // If the file ends in /, then it can only match a\n // a pattern that ends in /, unless the pattern just\n // doesn't have any more for it. But, a/b/ should *not*\n // match \"a/b/*\", even though \"\" matches against the\n // [^/]*? pattern, except in partial mode, where it might\n // simply not be reached yet.\n // However, a/b/ should still satisfy a/*\n\n // now either we fell off the end of the pattern, or we're done.\n if (fi === fl && pi === pl) {\n // ran out of pattern and filename at the same time.\n // an exact hit!\n return true\n } else if (fi === fl) {\n // ran out of file, but still had pattern left.\n // this is ok if we're doing the match as part of\n // a glob fs traversal.\n return partial\n } else if (pi === pl) {\n // ran out of pattern, still have file left.\n // this is only acceptable if we're on the very last\n // empty segment of a file with a trailing slash.\n // a/* should match a/b/\n var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')\n return emptyFileEnd\n }\n\n // should be unreachable.\n throw new Error('wtf?')\n}\n\n// replace stuff like \\* with *\nfunction globUnescape (s) {\n return s.replace(/\\\\(.)/g, '$1')\n}\n\nfunction regExpEscape (s) {\n return s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&')\n}\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String 
message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc = Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is 
stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parse_url(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parse_url(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parse_url(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\nconst resolve_url = Url.resolve;\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\trequest.body.destroy(error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\t\t\tfinalize();\n\t\t});\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tconst locationURL = location === null ? 
null : resolve_url(request.url, location);\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. 
no content response (204)\n\t\t\t// 5. content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","'use strict';\n\nfunction 
posix(path) {\n\treturn path.charAt(0) === '/';\n}\n\nfunction win32(path) {\n\t// https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56\n\tvar splitDeviceRe = /^([a-zA-Z]:|[\\\\\\/]{2}[^\\\\\\/]+[\\\\\\/]+[^\\\\\\/]+)?([\\\\\\/])?([\\s\\S]*?)$/;\n\tvar result = splitDeviceRe.exec(path);\n\tvar device = result[1] || '';\n\tvar isUnc = Boolean(device && device.charAt(1) !== ':');\n\n\t// UNC paths are always absolute\n\treturn Boolean(result[2] || isUnc);\n}\n\nmodule.exports = process.platform === 'win32' ? win32 : posix;\nmodule.exports.posix = posix;\nmodule.exports.win32 = win32;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n 
var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","'use strict';\nconst os = require('os');\n\nconst homeDirectory = os.homedir();\n\nmodule.exports = pathWithTilde => {\n\tif (typeof pathWithTilde !== 'string') {\n\t\tthrow new TypeError(`Expected a string, got ${typeof pathWithTilde}`);\n\t}\n\n\treturn homeDirectory ? 
pathWithTilde.replace(/^~(?=$|\\/|\\\\)/, homeDirectory) : pathWithTilde;\n};\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n",null,"module.exports = require(\"assert\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"zlib\");","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(1769);\n",""],"names":[],"sourceRoot":""} \ No newline at end of file diff --git a/dist/licenses.txt b/dist/licenses.txt new file mode 100644 index 00000000..f5e218f1 --- /dev/null +++ b/dist/licenses.txt @@ -0,0 +1,818 @@ +@actions/core +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/github +MIT +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +@actions/http-client +MIT +Actions Http Client for Node.js + +Copyright (c) GitHub, Inc. + +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +associated documentation files (the "Software"), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@octokit/auth-token +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/core +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/endpoint +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/graphql +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/plugin-paginate-rest +MIT +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@octokit/plugin-rest-endpoint-methods +MIT +MIT License Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +@octokit/request +MIT +The MIT License + +Copyright (c) 2018 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@octokit/request-error +MIT +The MIT License + +Copyright (c) 2019 Octokit contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +@vercel/ncc +MIT +Copyright 2018 ZEIT, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +balanced-match +MIT +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +before-after-hook +Apache-2.0 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Gregor Martynus and other contributors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + +brace-expansion +MIT +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +concat-map +MIT +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +deprecation +ISC +The ISC License + +Copyright (c) Gregor Martynus and contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +fs.realpath +ISC +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + + +glob +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ + + +inflight +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +inherits +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. + + + +is-plain-object +MIT +The MIT License (MIT) + +Copyright (c) 2014-2017, Jon Schlinkert. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +minimatch +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ + +node-fetch +MIT +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + + +once +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +path-is-absolute +MIT +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ + +tunnel +MIT +The MIT License (MIT) + +Copyright (c) 2012 Koichi Kobayashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +universal-user-agent +ISC +# [ISC License](https://spdx.org/licenses/ISC) + +Copyright (c) 2018, Gregor Martynus (https://github.com/gr2m) + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + + +untildify +MIT +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +wrappy +ISC +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/dist/sourcemap-register.js b/dist/sourcemap-register.js new file mode 100644 index 00000000..46ad4d42 --- /dev/null +++ b/dist/sourcemap-register.js @@ -0,0 +1 @@ +(()=>{var e={650:e=>{var r=Object.prototype.toString;var n=typeof Buffer.alloc==="function"&&typeof Buffer.allocUnsafe==="function"&&typeof Buffer.from==="function";function isArrayBuffer(e){return r.call(e).slice(8,-1)==="ArrayBuffer"}function fromArrayBuffer(e,r,t){r>>>=0;var o=e.byteLength-r;if(o<0){throw new RangeError("'offset' is out of bounds")}if(t===undefined){t=o}else{t>>>=0;if(t>o){throw new RangeError("'length' is out of bounds")}}return n?Buffer.from(e.slice(r,r+t)):new Buffer(new Uint8Array(e.slice(r,r+t)))}function fromString(e,r){if(typeof r!=="string"||r===""){r="utf8"}if(!Buffer.isEncoding(r)){throw new TypeError('"encoding" must be a valid string encoding')}return n?Buffer.from(e,r):new Buffer(e,r)}function bufferFrom(e,r,t){if(typeof e==="number"){throw new TypeError('"value" argument must not be a number')}if(isArrayBuffer(e)){return fromArrayBuffer(e,r,t)}if(typeof e==="string"){return fromString(e,r)}return n?Buffer.from(e):new Buffer(e)}e.exports=bufferFrom},284:(e,r,n)=>{e=n.nmd(e);var t=n(596).SourceMapConsumer;var o=n(17);var i;try{i=n(147);if(!i.existsSync||!i.readFileSync){i=null}}catch(e){}var a=n(650);function dynamicRequire(e,r){return e.require(r)}var u=false;var s=false;var l=false;var c="auto";var p={};var f={};var g=/^data:application\/json[^,]+base64,/;var h=[];var d=[];function isInBrowser(){if(c==="browser")return true;if(c==="node")return false;return typeof window!=="undefined"&&typeof XMLHttpRequest==="function"&&!(window.require&&window.module&&window.process&&window.process.type==="renderer")}function hasGlobalProcessEventEmitter(){return typeof process==="object"&&process!==null&&typeof process.on==="function"}function handlerExec(e){return function(r){for(var n=0;n"}var n=this.getLineNumber();if(n!=null){r+=":"+n;var t=this.getColumnNumber();if(t){r+=":"+t}}}var o="";var i=this.getFunctionName();var a=true;var u=this.isConstructor();var s=!(this.isToplevel()||u);if(s){var l=this.getTypeName();if(l==="[object Object]"){l="null"}var c=this.getMethodName();if(i){if(l&&i.indexOf(l)!=0){o+=l+"."}o+=i;if(c&&i.indexOf("."+c)!=i.length-c.length-1){o+=" [as "+c+"]"}}else{o+=l+"."+(c||"")}}else if(u){o+="new "+(i||"")}else if(i){o+=i}else{o+=r;a=false}if(a){o+=" ("+r+")"}return o}function cloneCallSite(e){var r={};Object.getOwnPropertyNames(Object.getPrototypeOf(e)).forEach((function(n){r[n]=/^(?:is|get)/.test(n)?function(){return e[n].call(e)}:e[n]}));r.toString=CallSiteToString;return r}function wrapCallSite(e,r){if(r===undefined){r={nextPosition:null,curPosition:null}}if(e.isNative()){r.curPosition=null;return e}var n=e.getFileName()||e.getScriptNameOrSourceURL();if(n){var t=e.getLineNumber();var o=e.getColumnNumber()-1;var i=/^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/;var a=i.test(process.version)?0:62;if(t===1&&o>a&&!isInBrowser()&&!e.isEval()){o-=a}var u=mapSourcePosition({source:n,line:t,column:o});r.curPosition=u;e=cloneCallSite(e);var s=e.getFunctionName;e.getFunctionName=function(){if(r.nextPosition==null){return 
s()}return r.nextPosition.name||s()};e.getFileName=function(){return u.source};e.getLineNumber=function(){return u.line};e.getColumnNumber=function(){return u.column+1};e.getScriptNameOrSourceURL=function(){return u.source};return e}var l=e.isEval()&&e.getEvalOrigin();if(l){l=mapEvalOrigin(l);e=cloneCallSite(e);e.getEvalOrigin=function(){return l};return e}return e}function prepareStackTrace(e,r){if(l){p={};f={}}var n=e.name||"Error";var t=e.message||"";var o=n+": "+t;var i={nextPosition:null,curPosition:null};var a=[];for(var u=r.length-1;u>=0;u--){a.push("\n at "+wrapCallSite(r[u],i));i.nextPosition=i.curPosition}i.curPosition=i.nextPosition=null;return o+a.reverse().join("")}function getErrorSource(e){var r=/\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(e.stack);if(r){var n=r[1];var t=+r[2];var o=+r[3];var a=p[n];if(!a&&i&&i.existsSync(n)){try{a=i.readFileSync(n,"utf8")}catch(e){a=""}}if(a){var u=a.split(/(?:\r\n|\r|\n)/)[t-1];if(u){return n+":"+t+"\n"+u+"\n"+new Array(o).join(" ")+"^"}}}return null}function printErrorAndExit(e){var r=getErrorSource(e);if(process.stderr._handle&&process.stderr._handle.setBlocking){process.stderr._handle.setBlocking(true)}if(r){console.error();console.error(r)}console.error(e.stack);process.exit(1)}function shimEmitUncaughtException(){var e=process.emit;process.emit=function(r){if(r==="uncaughtException"){var n=arguments[1]&&arguments[1].stack;var t=this.listeners(r).length>0;if(n&&!t){return printErrorAndExit(arguments[1])}}return e.apply(this,arguments)}}var S=h.slice(0);var _=d.slice(0);r.wrapCallSite=wrapCallSite;r.getErrorSource=getErrorSource;r.mapSourcePosition=mapSourcePosition;r.retrieveSourceMap=v;r.install=function(r){r=r||{};if(r.environment){c=r.environment;if(["node","browser","auto"].indexOf(c)===-1){throw new Error("environment "+c+" was unknown. 
Available options are {auto, browser, node}")}}if(r.retrieveFile){if(r.overrideRetrieveFile){h.length=0}h.unshift(r.retrieveFile)}if(r.retrieveSourceMap){if(r.overrideRetrieveSourceMap){d.length=0}d.unshift(r.retrieveSourceMap)}if(r.hookRequire&&!isInBrowser()){var n=dynamicRequire(e,"module");var t=n.prototype._compile;if(!t.__sourceMapSupport){n.prototype._compile=function(e,r){p[r]=e;f[r]=undefined;return t.call(this,e,r)};n.prototype._compile.__sourceMapSupport=true}}if(!l){l="emptyCacheBetweenOperations"in r?r.emptyCacheBetweenOperations:false}if(!u){u=true;Error.prepareStackTrace=prepareStackTrace}if(!s){var o="handleUncaughtExceptions"in r?r.handleUncaughtExceptions:true;try{var i=dynamicRequire(e,"worker_threads");if(i.isMainThread===false){o=false}}catch(e){}if(o&&hasGlobalProcessEventEmitter()){s=true;shimEmitUncaughtException()}}};r.resetRetrieveHandlers=function(){h.length=0;d.length=0;h=S.slice(0);d=_.slice(0);v=handlerExec(d);m=handlerExec(h)}},837:(e,r,n)=>{var t=n(983);var o=Object.prototype.hasOwnProperty;var i=typeof Map!=="undefined";function ArraySet(){this._array=[];this._set=i?new Map:Object.create(null)}ArraySet.fromArray=function ArraySet_fromArray(e,r){var n=new ArraySet;for(var t=0,o=e.length;t=0){return r}}else{var n=t.toSetString(e);if(o.call(this._set,n)){return this._set[n]}}throw new Error('"'+e+'" is not in the set.')};ArraySet.prototype.at=function ArraySet_at(e){if(e>=0&&e{var t=n(537);var o=5;var i=1<>1;return r?-n:n}r.encode=function base64VLQ_encode(e){var r="";var n;var i=toVLQSigned(e);do{n=i&a;i>>>=o;if(i>0){n|=u}r+=t.encode(n)}while(i>0);return r};r.decode=function base64VLQ_decode(e,r,n){var i=e.length;var s=0;var l=0;var c,p;do{if(r>=i){throw new Error("Expected more digits in base 64 VLQ value.")}p=t.decode(e.charCodeAt(r++));if(p===-1){throw new Error("Invalid base64 digit: "+e.charAt(r-1))}c=!!(p&u);p&=a;s=s+(p<{var n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split("");r.encode=function(e){if(0<=e&&e{r.GREATEST_LOWER_BOUND=1;r.LEAST_UPPER_BOUND=2;function recursiveSearch(e,n,t,o,i,a){var u=Math.floor((n-e)/2)+e;var s=i(t,o[u],true);if(s===0){return u}else if(s>0){if(n-u>1){return recursiveSearch(u,n,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return n1){return recursiveSearch(e,u,t,o,i,a)}if(a==r.LEAST_UPPER_BOUND){return u}else{return e<0?-1:e}}}r.search=function search(e,n,t,o){if(n.length===0){return-1}var i=recursiveSearch(-1,n.length,e,n,t,o||r.GREATEST_LOWER_BOUND);if(i<0){return-1}while(i-1>=0){if(t(n[i],n[i-1],true)!==0){break}--i}return i}},740:(e,r,n)=>{var t=n(983);function generatedPositionAfter(e,r){var n=e.generatedLine;var o=r.generatedLine;var i=e.generatedColumn;var a=r.generatedColumn;return o>n||o==n&&a>=i||t.compareByGeneratedPositionsInflated(e,r)<=0}function MappingList(){this._array=[];this._sorted=true;this._last={generatedLine:-1,generatedColumn:0}}MappingList.prototype.unsortedForEach=function MappingList_forEach(e,r){this._array.forEach(e,r)};MappingList.prototype.add=function MappingList_add(e){if(generatedPositionAfter(this._last,e)){this._last=e;this._array.push(e)}else{this._sorted=false;this._array.push(e)}};MappingList.prototype.toArray=function MappingList_toArray(){if(!this._sorted){this._array.sort(t.compareByGeneratedPositionsInflated);this._sorted=true}return this._array};r.H=MappingList},226:(e,r)=>{function swap(e,r,n){var t=e[r];e[r]=e[n];e[n]=t}function randomIntInRange(e,r){return Math.round(e+Math.random()*(r-e))}function doQuickSort(e,r,n,t){if(n{var t;var o=n(983);var i=n(164);var 
a=n(837).I;var u=n(215);var s=n(226).U;function SourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}return n.sections!=null?new IndexedSourceMapConsumer(n,r):new BasicSourceMapConsumer(n,r)}SourceMapConsumer.fromSourceMap=function(e,r){return BasicSourceMapConsumer.fromSourceMap(e,r)};SourceMapConsumer.prototype._version=3;SourceMapConsumer.prototype.__generatedMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_generatedMappings",{configurable:true,enumerable:true,get:function(){if(!this.__generatedMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__generatedMappings}});SourceMapConsumer.prototype.__originalMappings=null;Object.defineProperty(SourceMapConsumer.prototype,"_originalMappings",{configurable:true,enumerable:true,get:function(){if(!this.__originalMappings){this._parseMappings(this._mappings,this.sourceRoot)}return this.__originalMappings}});SourceMapConsumer.prototype._charIsMappingSeparator=function SourceMapConsumer_charIsMappingSeparator(e,r){var n=e.charAt(r);return n===";"||n===","};SourceMapConsumer.prototype._parseMappings=function SourceMapConsumer_parseMappings(e,r){throw new Error("Subclasses must implement _parseMappings")};SourceMapConsumer.GENERATED_ORDER=1;SourceMapConsumer.ORIGINAL_ORDER=2;SourceMapConsumer.GREATEST_LOWER_BOUND=1;SourceMapConsumer.LEAST_UPPER_BOUND=2;SourceMapConsumer.prototype.eachMapping=function SourceMapConsumer_eachMapping(e,r,n){var t=r||null;var i=n||SourceMapConsumer.GENERATED_ORDER;var a;switch(i){case SourceMapConsumer.GENERATED_ORDER:a=this._generatedMappings;break;case SourceMapConsumer.ORIGINAL_ORDER:a=this._originalMappings;break;default:throw new Error("Unknown order of iteration.")}var u=this.sourceRoot;a.map((function(e){var r=e.source===null?null:this._sources.at(e.source);r=o.computeSourceURL(u,r,this._sourceMapURL);return{source:r,generatedLine:e.generatedLine,generatedColumn:e.generatedColumn,originalLine:e.originalLine,originalColumn:e.originalColumn,name:e.name===null?null:this._names.at(e.name)}}),this).forEach(e,t)};SourceMapConsumer.prototype.allGeneratedPositionsFor=function SourceMapConsumer_allGeneratedPositionsFor(e){var r=o.getArg(e,"line");var n={source:o.getArg(e,"source"),originalLine:r,originalColumn:o.getArg(e,"column",0)};n.source=this._findSourceIndex(n.source);if(n.source<0){return[]}var t=[];var a=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,i.LEAST_UPPER_BOUND);if(a>=0){var u=this._originalMappings[a];if(e.column===undefined){var s=u.originalLine;while(u&&u.originalLine===s){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}else{var l=u.originalColumn;while(u&&u.originalLine===r&&u.originalColumn==l){t.push({line:o.getArg(u,"generatedLine",null),column:o.getArg(u,"generatedColumn",null),lastColumn:o.getArg(u,"lastGeneratedColumn",null)});u=this._originalMappings[++a]}}}return t};r.SourceMapConsumer=SourceMapConsumer;function BasicSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sources");var u=o.getArg(n,"names",[]);var s=o.getArg(n,"sourceRoot",null);var l=o.getArg(n,"sourcesContent",null);var c=o.getArg(n,"mappings");var p=o.getArg(n,"file",null);if(t!=this._version){throw new Error("Unsupported version: 
"+t)}if(s){s=o.normalize(s)}i=i.map(String).map(o.normalize).map((function(e){return s&&o.isAbsolute(s)&&o.isAbsolute(e)?o.relative(s,e):e}));this._names=a.fromArray(u.map(String),true);this._sources=a.fromArray(i,true);this._absoluteSources=this._sources.toArray().map((function(e){return o.computeSourceURL(s,e,r)}));this.sourceRoot=s;this.sourcesContent=l;this._mappings=c;this._sourceMapURL=r;this.file=p}BasicSourceMapConsumer.prototype=Object.create(SourceMapConsumer.prototype);BasicSourceMapConsumer.prototype.consumer=SourceMapConsumer;BasicSourceMapConsumer.prototype._findSourceIndex=function(e){var r=e;if(this.sourceRoot!=null){r=o.relative(this.sourceRoot,r)}if(this._sources.has(r)){return this._sources.indexOf(r)}var n;for(n=0;n1){v.source=l+_[1];l+=_[1];v.originalLine=i+_[2];i=v.originalLine;v.originalLine+=1;v.originalColumn=a+_[3];a=v.originalColumn;if(_.length>4){v.name=c+_[4];c+=_[4]}}m.push(v);if(typeof v.originalLine==="number"){d.push(v)}}}s(m,o.compareByGeneratedPositionsDeflated);this.__generatedMappings=m;s(d,o.compareByOriginalPositions);this.__originalMappings=d};BasicSourceMapConsumer.prototype._findMapping=function SourceMapConsumer_findMapping(e,r,n,t,o,a){if(e[n]<=0){throw new TypeError("Line must be greater than or equal to 1, got "+e[n])}if(e[t]<0){throw new TypeError("Column must be greater than or equal to 0, got "+e[t])}return i.search(e,r,o,a)};BasicSourceMapConsumer.prototype.computeColumnSpans=function SourceMapConsumer_computeColumnSpans(){for(var e=0;e=0){var t=this._generatedMappings[n];if(t.generatedLine===r.generatedLine){var i=o.getArg(t,"source",null);if(i!==null){i=this._sources.at(i);i=o.computeSourceURL(this.sourceRoot,i,this._sourceMapURL)}var a=o.getArg(t,"name",null);if(a!==null){a=this._names.at(a)}return{source:i,line:o.getArg(t,"originalLine",null),column:o.getArg(t,"originalColumn",null),name:a}}}return{source:null,line:null,column:null,name:null}};BasicSourceMapConsumer.prototype.hasContentsOfAllSources=function BasicSourceMapConsumer_hasContentsOfAllSources(){if(!this.sourcesContent){return false}return this.sourcesContent.length>=this._sources.size()&&!this.sourcesContent.some((function(e){return e==null}))};BasicSourceMapConsumer.prototype.sourceContentFor=function SourceMapConsumer_sourceContentFor(e,r){if(!this.sourcesContent){return null}var n=this._findSourceIndex(e);if(n>=0){return this.sourcesContent[n]}var t=e;if(this.sourceRoot!=null){t=o.relative(this.sourceRoot,t)}var i;if(this.sourceRoot!=null&&(i=o.urlParse(this.sourceRoot))){var a=t.replace(/^file:\/\//,"");if(i.scheme=="file"&&this._sources.has(a)){return this.sourcesContent[this._sources.indexOf(a)]}if((!i.path||i.path=="/")&&this._sources.has("/"+t)){return this.sourcesContent[this._sources.indexOf("/"+t)]}}if(r){return null}else{throw new Error('"'+t+'" is not in the SourceMap.')}};BasicSourceMapConsumer.prototype.generatedPositionFor=function SourceMapConsumer_generatedPositionFor(e){var r=o.getArg(e,"source");r=this._findSourceIndex(r);if(r<0){return{line:null,column:null,lastColumn:null}}var n={source:r,originalLine:o.getArg(e,"line"),originalColumn:o.getArg(e,"column")};var t=this._findMapping(n,this._originalMappings,"originalLine","originalColumn",o.compareByOriginalPositions,o.getArg(e,"bias",SourceMapConsumer.GREATEST_LOWER_BOUND));if(t>=0){var 
i=this._originalMappings[t];if(i.source===n.source){return{line:o.getArg(i,"generatedLine",null),column:o.getArg(i,"generatedColumn",null),lastColumn:o.getArg(i,"lastGeneratedColumn",null)}}}return{line:null,column:null,lastColumn:null}};t=BasicSourceMapConsumer;function IndexedSourceMapConsumer(e,r){var n=e;if(typeof e==="string"){n=o.parseSourceMapInput(e)}var t=o.getArg(n,"version");var i=o.getArg(n,"sections");if(t!=this._version){throw new Error("Unsupported version: "+t)}this._sources=new a;this._names=new a;var u={line:-1,column:0};this._sections=i.map((function(e){if(e.url){throw new Error("Support for url field in sections not implemented.")}var n=o.getArg(e,"offset");var t=o.getArg(n,"line");var i=o.getArg(n,"column");if(t{var t=n(215);var o=n(983);var i=n(837).I;var a=n(740).H;function SourceMapGenerator(e){if(!e){e={}}this._file=o.getArg(e,"file",null);this._sourceRoot=o.getArg(e,"sourceRoot",null);this._skipValidation=o.getArg(e,"skipValidation",false);this._sources=new i;this._names=new i;this._mappings=new a;this._sourcesContents=null}SourceMapGenerator.prototype._version=3;SourceMapGenerator.fromSourceMap=function SourceMapGenerator_fromSourceMap(e){var r=e.sourceRoot;var n=new SourceMapGenerator({file:e.file,sourceRoot:r});e.eachMapping((function(e){var t={generated:{line:e.generatedLine,column:e.generatedColumn}};if(e.source!=null){t.source=e.source;if(r!=null){t.source=o.relative(r,t.source)}t.original={line:e.originalLine,column:e.originalColumn};if(e.name!=null){t.name=e.name}}n.addMapping(t)}));e.sources.forEach((function(t){var i=t;if(r!==null){i=o.relative(r,t)}if(!n._sources.has(i)){n._sources.add(i)}var a=e.sourceContentFor(t);if(a!=null){n.setSourceContent(t,a)}}));return n};SourceMapGenerator.prototype.addMapping=function SourceMapGenerator_addMapping(e){var r=o.getArg(e,"generated");var n=o.getArg(e,"original",null);var t=o.getArg(e,"source",null);var i=o.getArg(e,"name",null);if(!this._skipValidation){this._validateMapping(r,n,t,i)}if(t!=null){t=String(t);if(!this._sources.has(t)){this._sources.add(t)}}if(i!=null){i=String(i);if(!this._names.has(i)){this._names.add(i)}}this._mappings.add({generatedLine:r.line,generatedColumn:r.column,originalLine:n!=null&&n.line,originalColumn:n!=null&&n.column,source:t,name:i})};SourceMapGenerator.prototype.setSourceContent=function SourceMapGenerator_setSourceContent(e,r){var n=e;if(this._sourceRoot!=null){n=o.relative(this._sourceRoot,n)}if(r!=null){if(!this._sourcesContents){this._sourcesContents=Object.create(null)}this._sourcesContents[o.toSetString(n)]=r}else if(this._sourcesContents){delete this._sourcesContents[o.toSetString(n)];if(Object.keys(this._sourcesContents).length===0){this._sourcesContents=null}}};SourceMapGenerator.prototype.applySourceMap=function SourceMapGenerator_applySourceMap(e,r,n){var t=r;if(r==null){if(e.file==null){throw new Error("SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, "+'or the source map\'s "file" property. 
Both were omitted.')}t=e.file}var a=this._sourceRoot;if(a!=null){t=o.relative(a,t)}var u=new i;var s=new i;this._mappings.unsortedForEach((function(r){if(r.source===t&&r.originalLine!=null){var i=e.originalPositionFor({line:r.originalLine,column:r.originalColumn});if(i.source!=null){r.source=i.source;if(n!=null){r.source=o.join(n,r.source)}if(a!=null){r.source=o.relative(a,r.source)}r.originalLine=i.line;r.originalColumn=i.column;if(i.name!=null){r.name=i.name}}}var l=r.source;if(l!=null&&!u.has(l)){u.add(l)}var c=r.name;if(c!=null&&!s.has(c)){s.add(c)}}),this);this._sources=u;this._names=s;e.sources.forEach((function(r){var t=e.sourceContentFor(r);if(t!=null){if(n!=null){r=o.join(n,r)}if(a!=null){r=o.relative(a,r)}this.setSourceContent(r,t)}}),this)};SourceMapGenerator.prototype._validateMapping=function SourceMapGenerator_validateMapping(e,r,n,t){if(r&&typeof r.line!=="number"&&typeof r.column!=="number"){throw new Error("original.line and original.column are not numbers -- you probably meant to omit "+"the original mapping entirely and only map the generated position. If so, pass "+"null for the original mapping instead of an object with empty or null values.")}if(e&&"line"in e&&"column"in e&&e.line>0&&e.column>=0&&!r&&!n&&!t){return}else if(e&&"line"in e&&"column"in e&&r&&"line"in r&&"column"in r&&e.line>0&&e.column>=0&&r.line>0&&r.column>=0&&n){return}else{throw new Error("Invalid mapping: "+JSON.stringify({generated:e,source:n,original:r,name:t}))}};SourceMapGenerator.prototype._serializeMappings=function SourceMapGenerator_serializeMappings(){var e=0;var r=1;var n=0;var i=0;var a=0;var u=0;var s="";var l;var c;var p;var f;var g=this._mappings.toArray();for(var h=0,d=g.length;h0){if(!o.compareByGeneratedPositionsInflated(c,g[h-1])){continue}l+=","}}l+=t.encode(c.generatedColumn-e);e=c.generatedColumn;if(c.source!=null){f=this._sources.indexOf(c.source);l+=t.encode(f-u);u=f;l+=t.encode(c.originalLine-1-i);i=c.originalLine-1;l+=t.encode(c.originalColumn-n);n=c.originalColumn;if(c.name!=null){p=this._names.indexOf(c.name);l+=t.encode(p-a);a=p}}s+=l}return s};SourceMapGenerator.prototype._generateSourcesContent=function SourceMapGenerator_generateSourcesContent(e,r){return e.map((function(e){if(!this._sourcesContents){return null}if(r!=null){e=o.relative(r,e)}var n=o.toSetString(e);return Object.prototype.hasOwnProperty.call(this._sourcesContents,n)?this._sourcesContents[n]:null}),this)};SourceMapGenerator.prototype.toJSON=function SourceMapGenerator_toJSON(){var e={version:this._version,sources:this._sources.toArray(),names:this._names.toArray(),mappings:this._serializeMappings()};if(this._file!=null){e.file=this._file}if(this._sourceRoot!=null){e.sourceRoot=this._sourceRoot}if(this._sourcesContents){e.sourcesContent=this._generateSourcesContent(e.sources,e.sourceRoot)}return e};SourceMapGenerator.prototype.toString=function SourceMapGenerator_toString(){return JSON.stringify(this.toJSON())};r.h=SourceMapGenerator},990:(e,r,n)=>{var t;var o=n(341).h;var i=n(983);var a=/(\r?\n)/;var u=10;var s="$$$isSourceNode$$$";function SourceNode(e,r,n,t,o){this.children=[];this.sourceContents={};this.line=e==null?null:e;this.column=r==null?null:r;this.source=n==null?null:n;this.name=o==null?null:o;this[s]=true;if(t!=null)this.add(t)}SourceNode.fromStringWithSourceMap=function SourceNode_fromStringWithSourceMap(e,r,n){var t=new SourceNode;var o=e.split(a);var u=0;var shiftNextLine=function(){var e=getNextLine();var r=getNextLine()||"";return e+r;function getNextLine(){return 
u=0;r--){this.prepend(e[r])}}else if(e[s]||typeof e==="string"){this.children.unshift(e)}else{throw new TypeError("Expected a SourceNode, string, or an array of SourceNodes and strings. Got "+e)}return this};SourceNode.prototype.walk=function SourceNode_walk(e){var r;for(var n=0,t=this.children.length;n0){r=[];for(n=0;n{function getArg(e,r,n){if(r in e){return e[r]}else if(arguments.length===3){return n}else{throw new Error('"'+r+'" is a required argument.')}}r.getArg=getArg;var n=/^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.-]*)(?::(\d+))?(.*)$/;var t=/^data:.+\,.+$/;function urlParse(e){var r=e.match(n);if(!r){return null}return{scheme:r[1],auth:r[2],host:r[3],port:r[4],path:r[5]}}r.urlParse=urlParse;function urlGenerate(e){var r="";if(e.scheme){r+=e.scheme+":"}r+="//";if(e.auth){r+=e.auth+"@"}if(e.host){r+=e.host}if(e.port){r+=":"+e.port}if(e.path){r+=e.path}return r}r.urlGenerate=urlGenerate;function normalize(e){var n=e;var t=urlParse(e);if(t){if(!t.path){return e}n=t.path}var o=r.isAbsolute(n);var i=n.split(/\/+/);for(var a,u=0,s=i.length-1;s>=0;s--){a=i[s];if(a==="."){i.splice(s,1)}else if(a===".."){u++}else if(u>0){if(a===""){i.splice(s+1,u);u=0}else{i.splice(s,2);u--}}}n=i.join("/");if(n===""){n=o?"/":"."}if(t){t.path=n;return urlGenerate(t)}return n}r.normalize=normalize;function join(e,r){if(e===""){e="."}if(r===""){r="."}var n=urlParse(r);var o=urlParse(e);if(o){e=o.path||"/"}if(n&&!n.scheme){if(o){n.scheme=o.scheme}return urlGenerate(n)}if(n||r.match(t)){return r}if(o&&!o.host&&!o.path){o.host=r;return urlGenerate(o)}var i=r.charAt(0)==="/"?r:normalize(e.replace(/\/+$/,"")+"/"+r);if(o){o.path=i;return urlGenerate(o)}return i}r.join=join;r.isAbsolute=function(e){return e.charAt(0)==="/"||n.test(e)};function relative(e,r){if(e===""){e="."}e=e.replace(/\/$/,"");var n=0;while(r.indexOf(e+"/")!==0){var t=e.lastIndexOf("/");if(t<0){return r}e=e.slice(0,t);if(e.match(/^([^\/]+:\/)?\/*$/)){return r}++n}return Array(n+1).join("../")+r.substr(e.length+1)}r.relative=relative;var o=function(){var e=Object.create(null);return!("__proto__"in e)}();function identity(e){return e}function toSetString(e){if(isProtoString(e)){return"$"+e}return e}r.toSetString=o?identity:toSetString;function fromSetString(e){if(isProtoString(e)){return e.slice(1)}return e}r.fromSetString=o?identity:fromSetString;function isProtoString(e){if(!e){return false}var r=e.length;if(r<9){return false}if(e.charCodeAt(r-1)!==95||e.charCodeAt(r-2)!==95||e.charCodeAt(r-3)!==111||e.charCodeAt(r-4)!==116||e.charCodeAt(r-5)!==111||e.charCodeAt(r-6)!==114||e.charCodeAt(r-7)!==112||e.charCodeAt(r-8)!==95||e.charCodeAt(r-9)!==95){return false}for(var n=r-10;n>=0;n--){if(e.charCodeAt(n)!==36){return false}}return true}function compareByOriginalPositions(e,r,n){var t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0||n){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0){return t}t=e.generatedLine-r.generatedLine;if(t!==0){return t}return strcmp(e.name,r.name)}r.compareByOriginalPositions=compareByOriginalPositions;function compareByGeneratedPositionsDeflated(e,r,n){var t=e.generatedLine-r.generatedLine;if(t!==0){return t}t=e.generatedColumn-r.generatedColumn;if(t!==0||n){return t}t=strcmp(e.source,r.source);if(t!==0){return t}t=e.originalLine-r.originalLine;if(t!==0){return t}t=e.originalColumn-r.originalColumn;if(t!==0){return t}return 
strcmp(e.name,r.name)}r.compareByGeneratedPositionsDeflated=compareByGeneratedPositionsDeflated;function strcmp(e,r){if(e===r){return 0}if(e===null){return 1}if(r===null){return-1}if(e>r){return 1}return-1}function compareByGeneratedPositionsInflated(e,r){var n=e.generatedLine-r.generatedLine;if(n!==0){return n}n=e.generatedColumn-r.generatedColumn;if(n!==0){return n}n=strcmp(e.source,r.source);if(n!==0){return n}n=e.originalLine-r.originalLine;if(n!==0){return n}n=e.originalColumn-r.originalColumn;if(n!==0){return n}return strcmp(e.name,r.name)}r.compareByGeneratedPositionsInflated=compareByGeneratedPositionsInflated;function parseSourceMapInput(e){return JSON.parse(e.replace(/^\)]}'[^\n]*\n/,""))}r.parseSourceMapInput=parseSourceMapInput;function computeSourceURL(e,r,n){r=r||"";if(e){if(e[e.length-1]!=="/"&&r[0]!=="/"){e+="/"}r=e+r}if(n){var t=urlParse(n);if(!t){throw new Error("sourceMapURL could not be parsed")}if(t.path){var o=t.path.lastIndexOf("/");if(o>=0){t.path=t.path.substring(0,o+1)}}r=join(urlGenerate(t),r)}return normalize(r)}r.computeSourceURL=computeSourceURL},596:(e,r,n)=>{n(341).h;r.SourceMapConsumer=n(327).SourceMapConsumer;n(990)},147:e=>{"use strict";e.exports=require("fs")},17:e=>{"use strict";e.exports=require("path")}};var r={};function __webpack_require__(n){var t=r[n];if(t!==undefined){return t.exports}var o=r[n]={id:n,loaded:false,exports:{}};var i=true;try{e[n](o,o.exports,__webpack_require__);i=false}finally{if(i)delete r[n]}o.loaded=true;return o.exports}(()=>{__webpack_require__.nmd=e=>{e.paths=[];if(!e.children)e.children=[];return e}})();if(typeof __webpack_require__!=="undefined")__webpack_require__.ab=__dirname+"/";var n={};(()=>{__webpack_require__(284).install()})();module.exports=n})();
\ No newline at end of file
diff --git a/package.json b/package.json
index 3ae39780..d6b16f04 100644
--- a/package.json
+++ b/package.json
@@ -3,12 +3,13 @@
   "version": "1.1.0",
   "private": true,
   "description": "An action which manages a github release",
-  "main": "lib/main.js",
+  "main": "dist/index.js",
   "scripts": {
     "build": "tsc",
     "clean": "rm -rf lib/*",
     "debug": "yarn clean && yarn install && yarn build",
-    "release": "yarn clean && yarn install --production && yarn build",
+    "package": "ncc build --source-map --license licenses.txt",
+    "release": "yarn clean && yarn install --production && yarn build && yarn package",
     "test": "jest"
   },
   "repository": {
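
For readability, this is how the relevant part of package.json reads once the hunk above is applied. It is a reconstruction from the diff, not an additional change; the two-space indentation and the omission of the other top-level fields are assumptions for the sketch.

```json
{
  "main": "dist/index.js",
  "scripts": {
    "build": "tsc",
    "clean": "rm -rf lib/*",
    "debug": "yarn clean && yarn install && yarn build",
    "package": "ncc build --source-map --license licenses.txt",
    "release": "yarn clean && yarn install --production && yarn build && yarn package",
    "test": "jest"
  }
}
```

With this change, "yarn release" ends with the new "package" step: ncc bundles the compiled action (with a source map and third-party license notices in licenses.txt) into dist/, producing the dist/index.js file added in this diff, which is what the updated "main" field points at.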